From 5d9bc9a0c207fbd76184386261c2e8d3ffee75a9 Mon Sep 17 00:00:00 2001 From: Dmitry Teselkin Date: Fri, 17 Jun 2016 15:29:38 +0000 Subject: [PATCH] Patched version from Ubuntu 16.04 blueprint: mos-xenial Change-Id: Icc100cb6f7905d62fef5ad5aeaef48b758b1a9d7 --- debian/NEWS | 26 + debian/README.Debian | 22 + debian/changelog | 874 + debian/clean | 2 + debian/compat | 1 + debian/control | 69 + debian/copyright | 261 + debian/dconv/LICENSE | 202 + debian/dconv/NOTICE | 13 + debian/dconv/README.md | 21 + debian/dconv/css/check.png | Bin 0 -> 531 bytes debian/dconv/css/cross.png | Bin 0 -> 640 bytes debian/dconv/css/page.css | 223 + debian/dconv/haproxy-dconv.py | 534 + debian/dconv/img/logo-med.png | Bin 0 -> 3522 bytes debian/dconv/js/typeahead.bundle.js | 2451 +++ debian/dconv/parser/__init__.py | 81 + debian/dconv/parser/arguments.py | 132 + debian/dconv/parser/example.py | 77 + debian/dconv/parser/keyword.py | 142 + debian/dconv/parser/seealso.py | 32 + debian/dconv/parser/table.py | 244 + debian/dconv/parser/underline.py | 16 + debian/dconv/templates/parser/arguments.tpl | 9 + debian/dconv/templates/parser/example.tpl | 12 + .../templates/parser/example/comment.tpl | 1 + debian/dconv/templates/parser/seealso.tpl | 1 + debian/dconv/templates/parser/table.tpl | 11 + .../dconv/templates/parser/table/header.tpl | 6 + debian/dconv/templates/parser/table/row.tpl | 36 + debian/dconv/templates/parser/underline.tpl | 1 + debian/dconv/templates/summary.html | 43 + debian/dconv/templates/template.html | 238 + debian/dconv/tools/generate-docs.sh | 177 + debian/gbp.conf | 4 + debian/halog.1 | 108 + debian/haproxy-doc.doc-base | 9 + debian/haproxy-doc.install | 6 + debian/haproxy-doc.links | 4 + debian/haproxy.README.Debian | 17 + debian/haproxy.cfg | 35 + debian/haproxy.default | 10 + debian/haproxy.dirs | 4 + debian/haproxy.docs | 4 + debian/haproxy.examples | 1 + debian/haproxy.init | 196 + debian/haproxy.install | 4 + debian/haproxy.lintian-overrides | 1 + 
debian/haproxy.maintscript | 1 + debian/haproxy.manpages | 3 + debian/haproxy.postinst | 16 + debian/haproxy.postrm | 16 + debian/haproxy.tmpfile | 1 + debian/haproxy.vim | 2 + debian/logrotate.conf | 11 + ...2-Use-dpkg-buildflags-to-build-halog.patch | 42 + ...e-configuration-statement-to-haproxy.patch | 262 + debian/patches/debianize-dconv.patch | 168 + .../haproxy.service-add-documentation.patch | 15 + ...y.service-check-config-before-reload.patch | 20 + .../haproxy.service-start-after-syslog.patch | 18 + ...xy.service-use-environment-variables.patch | 27 + debian/patches/series | 6 + debian/rsyslog.conf | 7 + debian/rules | 81 + debian/source/format | 1 + debian/source/include-binaries | 3 + debian/vim-haproxy.install | 3 + debian/vim-haproxy.yaml | 5 + debian/watch | 2 + haproxy-1.6.3/.gitignore | 71 + haproxy-1.6.3/CHANGELOG | 5612 ++++++ haproxy-1.6.3/CONTRIBUTING | 654 + haproxy-1.6.3/LICENSE | 37 + haproxy-1.6.3/MAINTAINERS | 60 + haproxy-1.6.3/Makefile | 865 + haproxy-1.6.3/README | 500 + haproxy-1.6.3/ROADMAP | 102 + haproxy-1.6.3/SUBVERS | 2 + haproxy-1.6.3/VERDATE | 2 + haproxy-1.6.3/VERSION | 1 + haproxy-1.6.3/contrib/base64/base64rev-gen.c | 70 + haproxy-1.6.3/contrib/halog/Makefile | 22 + haproxy-1.6.3/contrib/halog/fgets2.c | 262 + haproxy-1.6.3/contrib/halog/halog.c | 1794 ++ haproxy-1.6.3/contrib/ip6range/Makefile | 13 + haproxy-1.6.3/contrib/ip6range/ip6range.c | 396 + haproxy-1.6.3/contrib/iprange/Makefile | 13 + haproxy-1.6.3/contrib/iprange/iprange.c | 201 + haproxy-1.6.3/contrib/netsnmp-perl/README | 111 + .../cacti_data_query_haproxy_backends.xml | 750 + .../cacti_data_query_haproxy_frontends.xml | 750 + haproxy-1.6.3/contrib/netsnmp-perl/haproxy.pl | 249 + .../contrib/netsnmp-perl/haproxy_backend.xml | 83 + .../contrib/netsnmp-perl/haproxy_frontend.xml | 83 + .../contrib/netsnmp-perl/haproxy_socket.xml | 90 + haproxy-1.6.3/contrib/selinux/README | 18 + haproxy-1.6.3/contrib/selinux/haproxy.fc | 6 + haproxy-1.6.3/contrib/selinux/haproxy.if 
| 2 + haproxy-1.6.3/contrib/selinux/haproxy.te | 66 + haproxy-1.6.3/contrib/systemd/Makefile | 8 + .../contrib/systemd/haproxy.service.in | 13 + haproxy-1.6.3/contrib/trace/trace.awk | 70 + haproxy-1.6.3/debian/NEWS | 26 + haproxy-1.6.3/debian/README.Debian | 22 + haproxy-1.6.3/debian/changelog | 868 + haproxy-1.6.3/debian/clean | 2 + haproxy-1.6.3/debian/compat | 1 + haproxy-1.6.3/debian/control | 69 + haproxy-1.6.3/debian/copyright | 261 + haproxy-1.6.3/debian/dconv/LICENSE | 202 + haproxy-1.6.3/debian/dconv/NOTICE | 13 + haproxy-1.6.3/debian/dconv/README.md | 21 + haproxy-1.6.3/debian/dconv/css/check.png | Bin 0 -> 531 bytes haproxy-1.6.3/debian/dconv/css/cross.png | Bin 0 -> 640 bytes haproxy-1.6.3/debian/dconv/css/page.css | 223 + haproxy-1.6.3/debian/dconv/img/logo-med.png | Bin 0 -> 3522 bytes .../debian/dconv/js/typeahead.bundle.js | 2451 +++ haproxy-1.6.3/debian/dconv/parser/__init__.py | 81 + .../debian/dconv/parser/arguments.py | 132 + haproxy-1.6.3/debian/dconv/parser/example.py | 77 + haproxy-1.6.3/debian/dconv/parser/keyword.py | 142 + haproxy-1.6.3/debian/dconv/parser/seealso.py | 32 + haproxy-1.6.3/debian/dconv/parser/table.py | 244 + .../debian/dconv/parser/underline.py | 16 + .../dconv/templates/parser/arguments.tpl | 9 + .../debian/dconv/templates/parser/example.tpl | 12 + .../templates/parser/example/comment.tpl | 1 + .../debian/dconv/templates/parser/seealso.tpl | 1 + .../debian/dconv/templates/parser/table.tpl | 11 + .../dconv/templates/parser/table/header.tpl | 6 + .../dconv/templates/parser/table/row.tpl | 36 + .../dconv/templates/parser/underline.tpl | 1 + .../debian/dconv/templates/summary.html | 43 + .../debian/dconv/templates/template.html | 238 + .../debian/dconv/tools/generate-docs.sh | 177 + haproxy-1.6.3/debian/gbp.conf | 4 + haproxy-1.6.3/debian/halog.1 | 108 + haproxy-1.6.3/debian/haproxy.README.Debian | 17 + haproxy-1.6.3/debian/haproxy.cfg | 35 + haproxy-1.6.3/debian/haproxy.default | 10 + haproxy-1.6.3/debian/haproxy.dirs | 4 + 
haproxy-1.6.3/debian/haproxy.docs | 4 + haproxy-1.6.3/debian/haproxy.examples | 1 + haproxy-1.6.3/debian/haproxy.init | 196 + haproxy-1.6.3/debian/haproxy.install | 4 + .../debian/haproxy.lintian-overrides | 1 + haproxy-1.6.3/debian/haproxy.maintscript | 1 + haproxy-1.6.3/debian/haproxy.manpages | 3 + haproxy-1.6.3/debian/haproxy.postinst | 16 + haproxy-1.6.3/debian/haproxy.postrm | 16 + haproxy-1.6.3/debian/haproxy.tmpfile | 1 + haproxy-1.6.3/debian/haproxy.vim | 2 + haproxy-1.6.3/debian/logrotate.conf | 11 + ...e-configuration-statement-to-haproxy.patch | 262 + .../debian/patches/debianize-dconv.patch | 168 + .../haproxy.service-add-documentation.patch | 15 + ...y.service-check-config-before-reload.patch | 20 + .../haproxy.service-start-after-syslog.patch | 18 + ...xy.service-use-environment-variables.patch | 27 + haproxy-1.6.3/debian/patches/series | 6 + haproxy-1.6.3/debian/rsyslog.conf | 7 + haproxy-1.6.3/debian/rules | 81 + haproxy-1.6.3/debian/source/format | 1 + haproxy-1.6.3/debian/source/include-binaries | 3 + haproxy-1.6.3/debian/vim-haproxy.install | 3 + haproxy-1.6.3/debian/vim-haproxy.yaml | 5 + haproxy-1.6.3/debian/watch | 2 + haproxy-1.6.3/doc/acl.fig | 229 + haproxy-1.6.3/doc/architecture.txt | 1448 ++ haproxy-1.6.3/doc/close-options.txt | 46 + haproxy-1.6.3/doc/coding-style.txt | 1267 ++ haproxy-1.6.3/doc/configuration.txt | 15314 ++++++++++++++++ haproxy-1.6.3/doc/cookie-options.txt | 25 + .../doc/design-thoughts/backends-v0.txt | 27 + .../doc/design-thoughts/backends.txt | 125 + .../doc/design-thoughts/be-fe-changes.txt | 74 + .../design-thoughts/binding-possibilities.txt | 167 + .../doc/design-thoughts/buffer-redesign.txt | 129 + haproxy-1.6.3/doc/design-thoughts/buffers.fig | 1052 ++ .../doc/design-thoughts/config-language.txt | 262 + .../doc/design-thoughts/connection-reuse.txt | 224 + .../design-thoughts/connection-sharing.txt | 31 + .../doc/design-thoughts/dynamic-buffers.txt | 41 + .../doc/design-thoughts/entities-v2.txt | 276 + 
.../doc/design-thoughts/how-it-works.txt | 60 + haproxy-1.6.3/doc/design-thoughts/http2.txt | 277 + .../doc/design-thoughts/http_load_time.url | 5 + .../doc/design-thoughts/rate-shaping.txt | 90 + .../doc/design-thoughts/sess_par_sec.txt | 13 + haproxy-1.6.3/doc/gpl.txt | 340 + haproxy-1.6.3/doc/haproxy.1 | 197 + haproxy-1.6.3/doc/internals/acl.txt | 82 + haproxy-1.6.3/doc/internals/body-parsing.txt | 163 + .../doc/internals/buffer-operations.txt | 128 + haproxy-1.6.3/doc/internals/buffer-ops.fig | 152 + .../doc/internals/connect-status.txt | 28 + .../doc/internals/connection-header.txt | 196 + .../doc/internals/connection-scale.txt | 44 + haproxy-1.6.3/doc/internals/entities-v2.txt | 193 + haproxy-1.6.3/doc/internals/entities.fig | 270 + haproxy-1.6.3/doc/internals/entities.pdf | Bin 0 -> 6295 bytes haproxy-1.6.3/doc/internals/entities.svg | 1056 ++ haproxy-1.6.3/doc/internals/entities.txt | 96 + haproxy-1.6.3/doc/internals/hashing.txt | 83 + .../doc/internals/header-parser-speed.txt | 92 + haproxy-1.6.3/doc/internals/header-tree.txt | 124 + haproxy-1.6.3/doc/internals/http-cookies.txt | 45 + haproxy-1.6.3/doc/internals/http-docs.txt | 5 + haproxy-1.6.3/doc/internals/http-parsing.txt | 335 + haproxy-1.6.3/doc/internals/list.fig | 698 + haproxy-1.6.3/doc/internals/lua_socket.fig | 122 + haproxy-1.6.3/doc/internals/lua_socket.pdf | Bin 0 -> 14905 bytes haproxy-1.6.3/doc/internals/naming.txt | 54 + haproxy-1.6.3/doc/internals/pattern.dia | Bin 0 -> 5631 bytes haproxy-1.6.3/doc/internals/pattern.pdf | Bin 0 -> 37269 bytes .../doc/internals/polling-states.fig | 114 + .../doc/internals/repartition-be-fe-fi.txt | 20 + haproxy-1.6.3/doc/internals/sequence.fig | 123 + haproxy-1.6.3/doc/internals/stats-v2.txt | 8 + .../doc/internals/stream-sock-states.fig | 430 + haproxy-1.6.3/doc/intro.txt | 1570 ++ haproxy-1.6.3/doc/lgpl.txt | 504 + haproxy-1.6.3/doc/linux-syn-cookies.txt | 106 + haproxy-1.6.3/doc/lua-api/Makefile | 153 + haproxy-1.6.3/doc/lua-api/_static/channel.fig | 54 
+ haproxy-1.6.3/doc/lua-api/_static/channel.png | Bin 0 -> 29133 bytes haproxy-1.6.3/doc/lua-api/conf.py | 242 + haproxy-1.6.3/doc/lua-api/index.rst | 1702 ++ haproxy-1.6.3/doc/lua.txt | 967 + haproxy-1.6.3/doc/management.txt | 2099 +++ haproxy-1.6.3/doc/network-namespaces.txt | 106 + haproxy-1.6.3/doc/proxy-protocol.txt | 926 + haproxy-1.6.3/doc/queuing.fig | 192 + haproxy-1.6.3/ebtree/LICENSE | 504 + haproxy-1.6.3/ebtree/compiler.h | 23 + haproxy-1.6.3/ebtree/eb32tree.c | 218 + haproxy-1.6.3/ebtree/eb32tree.h | 499 + haproxy-1.6.3/ebtree/eb64tree.c | 218 + haproxy-1.6.3/ebtree/eb64tree.h | 584 + haproxy-1.6.3/ebtree/ebimtree.c | 44 + haproxy-1.6.3/ebtree/ebimtree.h | 324 + haproxy-1.6.3/ebtree/ebistree.c | 42 + haproxy-1.6.3/ebtree/ebistree.h | 329 + haproxy-1.6.3/ebtree/ebmbtree.c | 77 + haproxy-1.6.3/ebtree/ebmbtree.h | 809 + haproxy-1.6.3/ebtree/ebpttree.c | 208 + haproxy-1.6.3/ebtree/ebpttree.h | 177 + haproxy-1.6.3/ebtree/ebsttree.c | 42 + haproxy-1.6.3/ebtree/ebsttree.h | 324 + haproxy-1.6.3/ebtree/ebtree.c | 32 + haproxy-1.6.3/ebtree/ebtree.h | 917 + haproxy-1.6.3/examples/acl-content-sw.cfg | 130 + haproxy-1.6.3/examples/auth.cfg | 134 + haproxy-1.6.3/examples/check | 540 + haproxy-1.6.3/examples/check.conf | 93 + haproxy-1.6.3/examples/content-sw-sample.cfg | 65 + haproxy-1.6.3/examples/debug2ansi | 2 + haproxy-1.6.3/examples/debug2html | 2 + haproxy-1.6.3/examples/debugfind | 8 + haproxy-1.6.3/examples/errorfiles/400.http | 9 + haproxy-1.6.3/examples/errorfiles/403.http | 9 + haproxy-1.6.3/examples/errorfiles/408.http | 9 + haproxy-1.6.3/examples/errorfiles/500.http | 9 + haproxy-1.6.3/examples/errorfiles/502.http | 9 + haproxy-1.6.3/examples/errorfiles/503.http | 9 + haproxy-1.6.3/examples/errorfiles/504.http | 9 + haproxy-1.6.3/examples/errorfiles/README | 9 + haproxy-1.6.3/examples/haproxy.init | 137 + haproxy-1.6.3/examples/haproxy.spec | 369 + haproxy-1.6.3/examples/haproxy.vim | 164 + haproxy-1.6.3/examples/init.haproxy | 55 + 
haproxy-1.6.3/examples/option-http_proxy.cfg | 52 + haproxy-1.6.3/examples/seamless_reload.txt | 62 + haproxy-1.6.3/examples/ssl.cfg | 26 + haproxy-1.6.3/examples/stats_haproxy.sh | 78 + haproxy-1.6.3/examples/transparent_proxy.cfg | 55 + haproxy-1.6.3/include/common/accept4.h | 72 + haproxy-1.6.3/include/common/base64.h | 26 + haproxy-1.6.3/include/common/buffer.h | 532 + haproxy-1.6.3/include/common/cfgparse.h | 113 + haproxy-1.6.3/include/common/chunk.h | 142 + haproxy-1.6.3/include/common/compat.h | 156 + haproxy-1.6.3/include/common/compiler.h | 107 + haproxy-1.6.3/include/common/config.h | 73 + haproxy-1.6.3/include/common/debug.h | 70 + haproxy-1.6.3/include/common/defaults.h | 313 + haproxy-1.6.3/include/common/epoll.h | 106 + haproxy-1.6.3/include/common/errors.h | 66 + haproxy-1.6.3/include/common/hash.h | 30 + haproxy-1.6.3/include/common/memory.h | 175 + haproxy-1.6.3/include/common/mini-clist.h | 145 + haproxy-1.6.3/include/common/namespace.h | 25 + haproxy-1.6.3/include/common/rbtree.h | 150 + haproxy-1.6.3/include/common/regex.h | 158 + haproxy-1.6.3/include/common/splice.h | 83 + haproxy-1.6.3/include/common/standard.h | 1048 ++ haproxy-1.6.3/include/common/syscall.h | 159 + haproxy-1.6.3/include/common/template.h | 34 + haproxy-1.6.3/include/common/ticks.h | 154 + haproxy-1.6.3/include/common/time.h | 553 + haproxy-1.6.3/include/common/tools.h | 51 + haproxy-1.6.3/include/common/uri_auth.h | 95 + haproxy-1.6.3/include/common/version.h | 70 + haproxy-1.6.3/include/import/51d.h | 9 + haproxy-1.6.3/include/import/da.h | 11 + haproxy-1.6.3/include/import/lru.h | 74 + haproxy-1.6.3/include/import/xxhash.h | 156 + haproxy-1.6.3/include/proto/acl.h | 154 + haproxy-1.6.3/include/proto/action.h | 71 + haproxy-1.6.3/include/proto/applet.h | 97 + haproxy-1.6.3/include/proto/arg.h | 72 + haproxy-1.6.3/include/proto/auth.h | 37 + haproxy-1.6.3/include/proto/backend.h | 135 + haproxy-1.6.3/include/proto/channel.h | 459 + haproxy-1.6.3/include/proto/checks.h | 60 
+ haproxy-1.6.3/include/proto/compression.h | 49 + haproxy-1.6.3/include/proto/connection.h | 596 + haproxy-1.6.3/include/proto/dns.h | 49 + haproxy-1.6.3/include/proto/dumpstats.h | 69 + haproxy-1.6.3/include/proto/fd.h | 350 + haproxy-1.6.3/include/proto/freq_ctr.h | 242 + haproxy-1.6.3/include/proto/frontend.h | 38 + haproxy-1.6.3/include/proto/hdr_idx.h | 96 + haproxy-1.6.3/include/proto/hlua.h | 44 + haproxy-1.6.3/include/proto/lb_chash.h | 40 + haproxy-1.6.3/include/proto/lb_fas.h | 39 + haproxy-1.6.3/include/proto/lb_fwlc.h | 39 + haproxy-1.6.3/include/proto/lb_fwrr.h | 39 + haproxy-1.6.3/include/proto/lb_map.h | 43 + haproxy-1.6.3/include/proto/listener.h | 152 + haproxy-1.6.3/include/proto/log.h | 159 + haproxy-1.6.3/include/proto/map.h | 38 + haproxy-1.6.3/include/proto/obj_type.h | 154 + haproxy-1.6.3/include/proto/pattern.h | 215 + haproxy-1.6.3/include/proto/payload.h | 39 + haproxy-1.6.3/include/proto/peers.h | 36 + haproxy-1.6.3/include/proto/pipe.h | 54 + haproxy-1.6.3/include/proto/port_range.h | 77 + haproxy-1.6.3/include/proto/proto_http.h | 301 + haproxy-1.6.3/include/proto/proto_tcp.h | 67 + haproxy-1.6.3/include/proto/proto_udp.h | 27 + haproxy-1.6.3/include/proto/proto_uxst.h | 41 + haproxy-1.6.3/include/proto/protocol.h | 71 + haproxy-1.6.3/include/proto/proxy.h | 154 + haproxy-1.6.3/include/proto/queue.h | 90 + haproxy-1.6.3/include/proto/raw_sock.h | 36 + haproxy-1.6.3/include/proto/sample.h | 66 + haproxy-1.6.3/include/proto/server.h | 226 + haproxy-1.6.3/include/proto/session.h | 73 + haproxy-1.6.3/include/proto/shctx.h | 51 + haproxy-1.6.3/include/proto/signal.h | 43 + haproxy-1.6.3/include/proto/ssl_sock.h | 86 + haproxy-1.6.3/include/proto/stick_table.h | 167 + haproxy-1.6.3/include/proto/stream.h | 319 + .../include/proto/stream_interface.h | 407 + haproxy-1.6.3/include/proto/task.h | 280 + haproxy-1.6.3/include/proto/template.h | 36 + haproxy-1.6.3/include/proto/vars.h | 14 + haproxy-1.6.3/include/types/acl.h | 168 + 
haproxy-1.6.3/include/types/action.h | 175 + haproxy-1.6.3/include/types/applet.h | 146 + haproxy-1.6.3/include/types/arg.h | 134 + haproxy-1.6.3/include/types/auth.h | 59 + haproxy-1.6.3/include/types/backend.h | 163 + haproxy-1.6.3/include/types/capture.h | 46 + haproxy-1.6.3/include/types/channel.h | 285 + haproxy-1.6.3/include/types/checks.h | 229 + haproxy-1.6.3/include/types/compression.h | 94 + haproxy-1.6.3/include/types/connection.h | 366 + haproxy-1.6.3/include/types/counters.h | 120 + haproxy-1.6.3/include/types/dns.h | 219 + haproxy-1.6.3/include/types/fd.h | 141 + haproxy-1.6.3/include/types/freq_ctr.h | 55 + haproxy-1.6.3/include/types/global.h | 257 + haproxy-1.6.3/include/types/hdr_idx.h | 84 + haproxy-1.6.3/include/types/hlua.h | 148 + haproxy-1.6.3/include/types/lb_chash.h | 42 + haproxy-1.6.3/include/types/lb_fas.h | 40 + haproxy-1.6.3/include/types/lb_fwlc.h | 40 + haproxy-1.6.3/include/types/lb_fwrr.h | 51 + haproxy-1.6.3/include/types/lb_map.h | 44 + haproxy-1.6.3/include/types/listener.h | 234 + haproxy-1.6.3/include/types/log.h | 182 + haproxy-1.6.3/include/types/mailers.h | 65 + haproxy-1.6.3/include/types/map.h | 42 + haproxy-1.6.3/include/types/obj_type.h | 52 + haproxy-1.6.3/include/types/pattern.h | 228 + haproxy-1.6.3/include/types/peers.h | 98 + haproxy-1.6.3/include/types/pipe.h | 45 + haproxy-1.6.3/include/types/port_range.h | 40 + haproxy-1.6.3/include/types/proto_http.h | 414 + haproxy-1.6.3/include/types/proto_udp.h | 52 + haproxy-1.6.3/include/types/protocol.h | 77 + haproxy-1.6.3/include/types/proxy.h | 494 + haproxy-1.6.3/include/types/queue.h | 45 + haproxy-1.6.3/include/types/sample.h | 339 + haproxy-1.6.3/include/types/server.h | 284 + haproxy-1.6.3/include/types/session.h | 57 + haproxy-1.6.3/include/types/signal.h | 49 + haproxy-1.6.3/include/types/ssl_sock.h | 51 + haproxy-1.6.3/include/types/stick_table.h | 205 + haproxy-1.6.3/include/types/stream.h | 173 + .../include/types/stream_interface.h | 120 + 
haproxy-1.6.3/include/types/task.h | 79 + haproxy-1.6.3/include/types/template.h | 34 + haproxy-1.6.3/include/types/vars.h | 33 + haproxy-1.6.3/src/51d.c | 619 + haproxy-1.6.3/src/acl.c | 1360 ++ haproxy-1.6.3/src/applet.c | 72 + haproxy-1.6.3/src/arg.c | 305 + haproxy-1.6.3/src/auth.c | 296 + haproxy-1.6.3/src/backend.c | 1829 ++ haproxy-1.6.3/src/base64.c | 184 + haproxy-1.6.3/src/buffer.c | 287 + haproxy-1.6.3/src/cfgparse.c | 9004 +++++++++ haproxy-1.6.3/src/channel.c | 470 + haproxy-1.6.3/src/checks.c | 3354 ++++ haproxy-1.6.3/src/chunk.c | 269 + haproxy-1.6.3/src/compression.c | 883 + haproxy-1.6.3/src/connection.c | 841 + haproxy-1.6.3/src/da.c | 367 + haproxy-1.6.3/src/dns.c | 1141 ++ haproxy-1.6.3/src/dumpstats.c | 6678 +++++++ haproxy-1.6.3/src/ev_epoll.c | 282 + haproxy-1.6.3/src/ev_kqueue.c | 253 + haproxy-1.6.3/src/ev_poll.c | 258 + haproxy-1.6.3/src/ev_select.c | 253 + haproxy-1.6.3/src/fd.c | 389 + haproxy-1.6.3/src/freq_ctr.c | 194 + haproxy-1.6.3/src/frontend.c | 229 + haproxy-1.6.3/src/haproxy-systemd-wrapper.c | 214 + haproxy-1.6.3/src/haproxy.c | 1944 ++ haproxy-1.6.3/src/hash.c | 107 + haproxy-1.6.3/src/hdr_idx.c | 72 + haproxy-1.6.3/src/hlua.c | 7088 +++++++ haproxy-1.6.3/src/i386-linux-vsys.c | 212 + haproxy-1.6.3/src/lb_chash.c | 404 + haproxy-1.6.3/src/lb_fas.c | 317 + haproxy-1.6.3/src/lb_fwlc.c | 309 + haproxy-1.6.3/src/lb_fwrr.c | 569 + haproxy-1.6.3/src/lb_map.c | 271 + haproxy-1.6.3/src/listener.c | 835 + haproxy-1.6.3/src/log.c | 2080 +++ haproxy-1.6.3/src/lru.c | 269 + haproxy-1.6.3/src/mailers.c | 17 + haproxy-1.6.3/src/map.c | 276 + haproxy-1.6.3/src/memory.c | 228 + haproxy-1.6.3/src/namespace.c | 113 + haproxy-1.6.3/src/pattern.c | 2466 +++ haproxy-1.6.3/src/payload.c | 980 + haproxy-1.6.3/src/peers.c | 1990 ++ haproxy-1.6.3/src/pipe.c | 114 + haproxy-1.6.3/src/proto_http.c | 13112 +++++++++++++ haproxy-1.6.3/src/proto_tcp.c | 2474 +++ haproxy-1.6.3/src/proto_udp.c | 33 + haproxy-1.6.3/src/proto_uxst.c | 718 + 
haproxy-1.6.3/src/protocol.c | 122 + haproxy-1.6.3/src/proxy.c | 1222 ++ haproxy-1.6.3/src/queue.c | 249 + haproxy-1.6.3/src/raw_sock.c | 426 + haproxy-1.6.3/src/rbtree.c | 399 + haproxy-1.6.3/src/regex.c | 334 + haproxy-1.6.3/src/sample.c | 2657 +++ haproxy-1.6.3/src/server.c | 2809 +++ haproxy-1.6.3/src/session.c | 465 + haproxy-1.6.3/src/shctx.c | 658 + haproxy-1.6.3/src/signal.c | 229 + haproxy-1.6.3/src/ssl_sock.c | 5446 ++++++ haproxy-1.6.3/src/standard.c | 2895 +++ haproxy-1.6.3/src/stick_table.c | 1528 ++ haproxy-1.6.3/src/stream.c | 3473 ++++ haproxy-1.6.3/src/stream_interface.c | 1522 ++ haproxy-1.6.3/src/task.c | 264 + haproxy-1.6.3/src/time.c | 226 + haproxy-1.6.3/src/trace.c | 247 + haproxy-1.6.3/src/uri_auth.c | 316 + haproxy-1.6.3/src/vars.c | 689 + haproxy-1.6.3/src/xxhash.c | 928 + haproxy-1.6.3/tests/0000-debug-stats.diff | 54 + haproxy-1.6.3/tests/filltab25.c | 399 + haproxy-1.6.3/tests/hash_results.txt | 218 + haproxy-1.6.3/tests/hashing-results.txt | 314 + haproxy-1.6.3/tests/io_limits.txt | 116 + haproxy-1.6.3/tests/ip-hash.c | 202 + haproxy-1.6.3/tests/reset.c | 45 + haproxy-1.6.3/tests/sockstat.txt | 7 + haproxy-1.6.3/tests/test-acl-args.cfg | 36 + haproxy-1.6.3/tests/test-address-syntax.cfg | 84 + haproxy-1.6.3/tests/test-arg.c | 44 + haproxy-1.6.3/tests/test-backlog.cfg | 22 + haproxy-1.6.3/tests/test-check-expect.cfg | 87 + haproxy-1.6.3/tests/test-connection.cfg | 42 + haproxy-1.6.3/tests/test-cookie-indirect.cfg | 47 + haproxy-1.6.3/tests/test-cookie-insert.cfg | 35 + haproxy-1.6.3/tests/test-cookie-passive.cfg | 35 + haproxy-1.6.3/tests/test-cookie-prefix.cfg | 35 + haproxy-1.6.3/tests/test-cookie-rewrite.cfg | 35 + haproxy-1.6.3/tests/test-disable-404.cfg | 61 + haproxy-1.6.3/tests/test-fsm.cfg | 347 + haproxy-1.6.3/tests/test-fwlc.cfg | 61 + haproxy-1.6.3/tests/test-fwrr.cfg | 51 + .../tests/test-http-send-name-hdr.cfg | 33 + haproxy-1.6.3/tests/test-inspect-smtp.cfg | 44 + haproxy-1.6.3/tests/test-inspect-ssl.cfg | 37 + 
haproxy-1.6.3/tests/test-map-ports.cfg | 31 + haproxy-1.6.3/tests/test-pollers.cfg | 15 + haproxy-1.6.3/tests/test-redirect.cfg | 49 + .../tests/test-sample-fetch-args.cfg | 36 + .../tests/test-sample-fetch-conv.cfg | 42 + haproxy-1.6.3/tests/test-sql.cfg | 29 + haproxy-1.6.3/tests/test-str2sa.cfg | 60 + haproxy-1.6.3/tests/test-time.cfg | 24 + haproxy-1.6.3/tests/test-timeout.cfg | 27 + haproxy-1.6.3/tests/test-url-hash.cfg | 40 + haproxy-1.6.3/tests/test-valid-names.cfg | 37 + haproxy-1.6.3/tests/test.c | 13 + haproxy-1.6.3/tests/test_hashes.c | 559 + haproxy-1.6.3/tests/test_pools.c | 207 + haproxy-1.6.3/tests/testinet.c | 27 + haproxy-1.6.3/tests/uri_hash.c | 377 + 521 files changed, 184981 insertions(+) create mode 100644 debian/NEWS create mode 100644 debian/README.Debian create mode 100644 debian/changelog create mode 100644 debian/clean create mode 100644 debian/compat create mode 100644 debian/control create mode 100644 debian/copyright create mode 100644 debian/dconv/LICENSE create mode 100644 debian/dconv/NOTICE create mode 100644 debian/dconv/README.md create mode 100644 debian/dconv/css/check.png create mode 100644 debian/dconv/css/cross.png create mode 100644 debian/dconv/css/page.css create mode 100755 debian/dconv/haproxy-dconv.py create mode 100644 debian/dconv/img/logo-med.png create mode 100644 debian/dconv/js/typeahead.bundle.js create mode 100644 debian/dconv/parser/__init__.py create mode 100644 debian/dconv/parser/arguments.py create mode 100644 debian/dconv/parser/example.py create mode 100644 debian/dconv/parser/keyword.py create mode 100644 debian/dconv/parser/seealso.py create mode 100644 debian/dconv/parser/table.py create mode 100644 debian/dconv/parser/underline.py create mode 100644 debian/dconv/templates/parser/arguments.tpl create mode 100644 debian/dconv/templates/parser/example.tpl create mode 100644 debian/dconv/templates/parser/example/comment.tpl create mode 100644 debian/dconv/templates/parser/seealso.tpl create mode 100644 
debian/dconv/templates/parser/table.tpl create mode 100644 debian/dconv/templates/parser/table/header.tpl create mode 100644 debian/dconv/templates/parser/table/row.tpl create mode 100644 debian/dconv/templates/parser/underline.tpl create mode 100644 debian/dconv/templates/summary.html create mode 100644 debian/dconv/templates/template.html create mode 100755 debian/dconv/tools/generate-docs.sh create mode 100644 debian/gbp.conf create mode 100644 debian/halog.1 create mode 100644 debian/haproxy-doc.doc-base create mode 100644 debian/haproxy-doc.install create mode 100644 debian/haproxy-doc.links create mode 100644 debian/haproxy.README.Debian create mode 100644 debian/haproxy.cfg create mode 100644 debian/haproxy.default create mode 100644 debian/haproxy.dirs create mode 100644 debian/haproxy.docs create mode 100644 debian/haproxy.examples create mode 100644 debian/haproxy.init create mode 100644 debian/haproxy.install create mode 100644 debian/haproxy.lintian-overrides create mode 100644 debian/haproxy.maintscript create mode 100644 debian/haproxy.manpages create mode 100644 debian/haproxy.postinst create mode 100644 debian/haproxy.postrm create mode 100644 debian/haproxy.tmpfile create mode 100644 debian/haproxy.vim create mode 100644 debian/logrotate.conf create mode 100644 debian/patches/0002-Use-dpkg-buildflags-to-build-halog.patch create mode 100644 debian/patches/MIRA0001-Adding-include-configuration-statement-to-haproxy.patch create mode 100644 debian/patches/debianize-dconv.patch create mode 100644 debian/patches/haproxy.service-add-documentation.patch create mode 100644 debian/patches/haproxy.service-check-config-before-reload.patch create mode 100644 debian/patches/haproxy.service-start-after-syslog.patch create mode 100644 debian/patches/haproxy.service-use-environment-variables.patch create mode 100644 debian/patches/series create mode 100644 debian/rsyslog.conf create mode 100755 debian/rules create mode 100644 debian/source/format create mode 100644 
debian/source/include-binaries create mode 100644 debian/vim-haproxy.install create mode 100644 debian/vim-haproxy.yaml create mode 100644 debian/watch create mode 100644 haproxy-1.6.3/.gitignore create mode 100644 haproxy-1.6.3/CHANGELOG create mode 100644 haproxy-1.6.3/CONTRIBUTING create mode 100644 haproxy-1.6.3/LICENSE create mode 100644 haproxy-1.6.3/MAINTAINERS create mode 100644 haproxy-1.6.3/Makefile create mode 100644 haproxy-1.6.3/README create mode 100644 haproxy-1.6.3/ROADMAP create mode 100644 haproxy-1.6.3/SUBVERS create mode 100644 haproxy-1.6.3/VERDATE create mode 100644 haproxy-1.6.3/VERSION create mode 100644 haproxy-1.6.3/contrib/base64/base64rev-gen.c create mode 100644 haproxy-1.6.3/contrib/halog/Makefile create mode 100644 haproxy-1.6.3/contrib/halog/fgets2.c create mode 100644 haproxy-1.6.3/contrib/halog/halog.c create mode 100644 haproxy-1.6.3/contrib/ip6range/Makefile create mode 100644 haproxy-1.6.3/contrib/ip6range/ip6range.c create mode 100644 haproxy-1.6.3/contrib/iprange/Makefile create mode 100644 haproxy-1.6.3/contrib/iprange/iprange.c create mode 100644 haproxy-1.6.3/contrib/netsnmp-perl/README create mode 100644 haproxy-1.6.3/contrib/netsnmp-perl/cacti_data_query_haproxy_backends.xml create mode 100644 haproxy-1.6.3/contrib/netsnmp-perl/cacti_data_query_haproxy_frontends.xml create mode 100644 haproxy-1.6.3/contrib/netsnmp-perl/haproxy.pl create mode 100644 haproxy-1.6.3/contrib/netsnmp-perl/haproxy_backend.xml create mode 100644 haproxy-1.6.3/contrib/netsnmp-perl/haproxy_frontend.xml create mode 100644 haproxy-1.6.3/contrib/netsnmp-perl/haproxy_socket.xml create mode 100644 haproxy-1.6.3/contrib/selinux/README create mode 100644 haproxy-1.6.3/contrib/selinux/haproxy.fc create mode 100644 haproxy-1.6.3/contrib/selinux/haproxy.if create mode 100644 haproxy-1.6.3/contrib/selinux/haproxy.te create mode 100644 haproxy-1.6.3/contrib/systemd/Makefile create mode 100644 haproxy-1.6.3/contrib/systemd/haproxy.service.in create mode 100755 
haproxy-1.6.3/contrib/trace/trace.awk create mode 100644 haproxy-1.6.3/debian/NEWS create mode 100644 haproxy-1.6.3/debian/README.Debian create mode 100644 haproxy-1.6.3/debian/changelog create mode 100644 haproxy-1.6.3/debian/clean create mode 100644 haproxy-1.6.3/debian/compat create mode 100644 haproxy-1.6.3/debian/control create mode 100644 haproxy-1.6.3/debian/copyright create mode 100644 haproxy-1.6.3/debian/dconv/LICENSE create mode 100644 haproxy-1.6.3/debian/dconv/NOTICE create mode 100644 haproxy-1.6.3/debian/dconv/README.md create mode 100644 haproxy-1.6.3/debian/dconv/css/check.png create mode 100644 haproxy-1.6.3/debian/dconv/css/cross.png create mode 100644 haproxy-1.6.3/debian/dconv/css/page.css create mode 100644 haproxy-1.6.3/debian/dconv/img/logo-med.png create mode 100644 haproxy-1.6.3/debian/dconv/js/typeahead.bundle.js create mode 100644 haproxy-1.6.3/debian/dconv/parser/__init__.py create mode 100644 haproxy-1.6.3/debian/dconv/parser/arguments.py create mode 100644 haproxy-1.6.3/debian/dconv/parser/example.py create mode 100644 haproxy-1.6.3/debian/dconv/parser/keyword.py create mode 100644 haproxy-1.6.3/debian/dconv/parser/seealso.py create mode 100644 haproxy-1.6.3/debian/dconv/parser/table.py create mode 100644 haproxy-1.6.3/debian/dconv/parser/underline.py create mode 100644 haproxy-1.6.3/debian/dconv/templates/parser/arguments.tpl create mode 100644 haproxy-1.6.3/debian/dconv/templates/parser/example.tpl create mode 100644 haproxy-1.6.3/debian/dconv/templates/parser/example/comment.tpl create mode 100644 haproxy-1.6.3/debian/dconv/templates/parser/seealso.tpl create mode 100644 haproxy-1.6.3/debian/dconv/templates/parser/table.tpl create mode 100644 haproxy-1.6.3/debian/dconv/templates/parser/table/header.tpl create mode 100644 haproxy-1.6.3/debian/dconv/templates/parser/table/row.tpl create mode 100644 haproxy-1.6.3/debian/dconv/templates/parser/underline.tpl create mode 100644 haproxy-1.6.3/debian/dconv/templates/summary.html create 
mode 100644 haproxy-1.6.3/debian/dconv/templates/template.html create mode 100755 haproxy-1.6.3/debian/dconv/tools/generate-docs.sh create mode 100644 haproxy-1.6.3/debian/gbp.conf create mode 100644 haproxy-1.6.3/debian/halog.1 create mode 100644 haproxy-1.6.3/debian/haproxy.README.Debian create mode 100644 haproxy-1.6.3/debian/haproxy.cfg create mode 100644 haproxy-1.6.3/debian/haproxy.default create mode 100644 haproxy-1.6.3/debian/haproxy.dirs create mode 100644 haproxy-1.6.3/debian/haproxy.docs create mode 100644 haproxy-1.6.3/debian/haproxy.examples create mode 100644 haproxy-1.6.3/debian/haproxy.init create mode 100644 haproxy-1.6.3/debian/haproxy.install create mode 100644 haproxy-1.6.3/debian/haproxy.lintian-overrides create mode 100644 haproxy-1.6.3/debian/haproxy.maintscript create mode 100644 haproxy-1.6.3/debian/haproxy.manpages create mode 100644 haproxy-1.6.3/debian/haproxy.postinst create mode 100644 haproxy-1.6.3/debian/haproxy.postrm create mode 100644 haproxy-1.6.3/debian/haproxy.tmpfile create mode 100644 haproxy-1.6.3/debian/haproxy.vim create mode 100644 haproxy-1.6.3/debian/logrotate.conf create mode 100644 haproxy-1.6.3/debian/patches/MIRA0001-Adding-include-configuration-statement-to-haproxy.patch create mode 100644 haproxy-1.6.3/debian/patches/debianize-dconv.patch create mode 100644 haproxy-1.6.3/debian/patches/haproxy.service-add-documentation.patch create mode 100644 haproxy-1.6.3/debian/patches/haproxy.service-check-config-before-reload.patch create mode 100644 haproxy-1.6.3/debian/patches/haproxy.service-start-after-syslog.patch create mode 100644 haproxy-1.6.3/debian/patches/haproxy.service-use-environment-variables.patch create mode 100644 haproxy-1.6.3/debian/patches/series create mode 100644 haproxy-1.6.3/debian/rsyslog.conf create mode 100755 haproxy-1.6.3/debian/rules create mode 100644 haproxy-1.6.3/debian/source/format create mode 100644 haproxy-1.6.3/debian/source/include-binaries create mode 100644 
haproxy-1.6.3/debian/vim-haproxy.install create mode 100644 haproxy-1.6.3/debian/vim-haproxy.yaml create mode 100644 haproxy-1.6.3/debian/watch create mode 100644 haproxy-1.6.3/doc/acl.fig create mode 100644 haproxy-1.6.3/doc/architecture.txt create mode 100644 haproxy-1.6.3/doc/close-options.txt create mode 100644 haproxy-1.6.3/doc/coding-style.txt create mode 100644 haproxy-1.6.3/doc/configuration.txt create mode 100644 haproxy-1.6.3/doc/cookie-options.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/backends-v0.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/backends.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/be-fe-changes.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/binding-possibilities.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/buffer-redesign.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/buffers.fig create mode 100644 haproxy-1.6.3/doc/design-thoughts/config-language.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/connection-reuse.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/connection-sharing.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/dynamic-buffers.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/entities-v2.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/how-it-works.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/http2.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/http_load_time.url create mode 100644 haproxy-1.6.3/doc/design-thoughts/rate-shaping.txt create mode 100644 haproxy-1.6.3/doc/design-thoughts/sess_par_sec.txt create mode 100644 haproxy-1.6.3/doc/gpl.txt create mode 100644 haproxy-1.6.3/doc/haproxy.1 create mode 100644 haproxy-1.6.3/doc/internals/acl.txt create mode 100644 haproxy-1.6.3/doc/internals/body-parsing.txt create mode 100644 haproxy-1.6.3/doc/internals/buffer-operations.txt create mode 100644 haproxy-1.6.3/doc/internals/buffer-ops.fig create mode 100644 
haproxy-1.6.3/doc/internals/connect-status.txt create mode 100644 haproxy-1.6.3/doc/internals/connection-header.txt create mode 100644 haproxy-1.6.3/doc/internals/connection-scale.txt create mode 100644 haproxy-1.6.3/doc/internals/entities-v2.txt create mode 100644 haproxy-1.6.3/doc/internals/entities.fig create mode 100644 haproxy-1.6.3/doc/internals/entities.pdf create mode 100644 haproxy-1.6.3/doc/internals/entities.svg create mode 100644 haproxy-1.6.3/doc/internals/entities.txt create mode 100644 haproxy-1.6.3/doc/internals/hashing.txt create mode 100644 haproxy-1.6.3/doc/internals/header-parser-speed.txt create mode 100644 haproxy-1.6.3/doc/internals/header-tree.txt create mode 100644 haproxy-1.6.3/doc/internals/http-cookies.txt create mode 100644 haproxy-1.6.3/doc/internals/http-docs.txt create mode 100644 haproxy-1.6.3/doc/internals/http-parsing.txt create mode 100644 haproxy-1.6.3/doc/internals/list.fig create mode 100644 haproxy-1.6.3/doc/internals/lua_socket.fig create mode 100644 haproxy-1.6.3/doc/internals/lua_socket.pdf create mode 100644 haproxy-1.6.3/doc/internals/naming.txt create mode 100644 haproxy-1.6.3/doc/internals/pattern.dia create mode 100644 haproxy-1.6.3/doc/internals/pattern.pdf create mode 100644 haproxy-1.6.3/doc/internals/polling-states.fig create mode 100644 haproxy-1.6.3/doc/internals/repartition-be-fe-fi.txt create mode 100644 haproxy-1.6.3/doc/internals/sequence.fig create mode 100644 haproxy-1.6.3/doc/internals/stats-v2.txt create mode 100644 haproxy-1.6.3/doc/internals/stream-sock-states.fig create mode 100644 haproxy-1.6.3/doc/intro.txt create mode 100644 haproxy-1.6.3/doc/lgpl.txt create mode 100644 haproxy-1.6.3/doc/linux-syn-cookies.txt create mode 100644 haproxy-1.6.3/doc/lua-api/Makefile create mode 100644 haproxy-1.6.3/doc/lua-api/_static/channel.fig create mode 100644 haproxy-1.6.3/doc/lua-api/_static/channel.png create mode 100644 haproxy-1.6.3/doc/lua-api/conf.py create mode 100644 haproxy-1.6.3/doc/lua-api/index.rst 
create mode 100644 haproxy-1.6.3/doc/lua.txt create mode 100644 haproxy-1.6.3/doc/management.txt create mode 100644 haproxy-1.6.3/doc/network-namespaces.txt create mode 100644 haproxy-1.6.3/doc/proxy-protocol.txt create mode 100644 haproxy-1.6.3/doc/queuing.fig create mode 100644 haproxy-1.6.3/ebtree/LICENSE create mode 100644 haproxy-1.6.3/ebtree/compiler.h create mode 100644 haproxy-1.6.3/ebtree/eb32tree.c create mode 100644 haproxy-1.6.3/ebtree/eb32tree.h create mode 100644 haproxy-1.6.3/ebtree/eb64tree.c create mode 100644 haproxy-1.6.3/ebtree/eb64tree.h create mode 100644 haproxy-1.6.3/ebtree/ebimtree.c create mode 100644 haproxy-1.6.3/ebtree/ebimtree.h create mode 100644 haproxy-1.6.3/ebtree/ebistree.c create mode 100644 haproxy-1.6.3/ebtree/ebistree.h create mode 100644 haproxy-1.6.3/ebtree/ebmbtree.c create mode 100644 haproxy-1.6.3/ebtree/ebmbtree.h create mode 100644 haproxy-1.6.3/ebtree/ebpttree.c create mode 100644 haproxy-1.6.3/ebtree/ebpttree.h create mode 100644 haproxy-1.6.3/ebtree/ebsttree.c create mode 100644 haproxy-1.6.3/ebtree/ebsttree.h create mode 100644 haproxy-1.6.3/ebtree/ebtree.c create mode 100644 haproxy-1.6.3/ebtree/ebtree.h create mode 100644 haproxy-1.6.3/examples/acl-content-sw.cfg create mode 100644 haproxy-1.6.3/examples/auth.cfg create mode 100755 haproxy-1.6.3/examples/check create mode 100644 haproxy-1.6.3/examples/check.conf create mode 100644 haproxy-1.6.3/examples/content-sw-sample.cfg create mode 100644 haproxy-1.6.3/examples/debug2ansi create mode 100644 haproxy-1.6.3/examples/debug2html create mode 100644 haproxy-1.6.3/examples/debugfind create mode 100644 haproxy-1.6.3/examples/errorfiles/400.http create mode 100644 haproxy-1.6.3/examples/errorfiles/403.http create mode 100644 haproxy-1.6.3/examples/errorfiles/408.http create mode 100644 haproxy-1.6.3/examples/errorfiles/500.http create mode 100644 haproxy-1.6.3/examples/errorfiles/502.http create mode 100644 haproxy-1.6.3/examples/errorfiles/503.http create mode 100644 
haproxy-1.6.3/examples/errorfiles/504.http create mode 100644 haproxy-1.6.3/examples/errorfiles/README create mode 100644 haproxy-1.6.3/examples/haproxy.init create mode 100644 haproxy-1.6.3/examples/haproxy.spec create mode 100644 haproxy-1.6.3/examples/haproxy.vim create mode 100644 haproxy-1.6.3/examples/init.haproxy create mode 100644 haproxy-1.6.3/examples/option-http_proxy.cfg create mode 100644 haproxy-1.6.3/examples/seamless_reload.txt create mode 100644 haproxy-1.6.3/examples/ssl.cfg create mode 100644 haproxy-1.6.3/examples/stats_haproxy.sh create mode 100644 haproxy-1.6.3/examples/transparent_proxy.cfg create mode 100644 haproxy-1.6.3/include/common/accept4.h create mode 100644 haproxy-1.6.3/include/common/base64.h create mode 100644 haproxy-1.6.3/include/common/buffer.h create mode 100644 haproxy-1.6.3/include/common/cfgparse.h create mode 100644 haproxy-1.6.3/include/common/chunk.h create mode 100644 haproxy-1.6.3/include/common/compat.h create mode 100644 haproxy-1.6.3/include/common/compiler.h create mode 100644 haproxy-1.6.3/include/common/config.h create mode 100644 haproxy-1.6.3/include/common/debug.h create mode 100644 haproxy-1.6.3/include/common/defaults.h create mode 100644 haproxy-1.6.3/include/common/epoll.h create mode 100644 haproxy-1.6.3/include/common/errors.h create mode 100644 haproxy-1.6.3/include/common/hash.h create mode 100644 haproxy-1.6.3/include/common/memory.h create mode 100644 haproxy-1.6.3/include/common/mini-clist.h create mode 100644 haproxy-1.6.3/include/common/namespace.h create mode 100644 haproxy-1.6.3/include/common/rbtree.h create mode 100644 haproxy-1.6.3/include/common/regex.h create mode 100644 haproxy-1.6.3/include/common/splice.h create mode 100644 haproxy-1.6.3/include/common/standard.h create mode 100644 haproxy-1.6.3/include/common/syscall.h create mode 100644 haproxy-1.6.3/include/common/template.h create mode 100644 haproxy-1.6.3/include/common/ticks.h create mode 100644 haproxy-1.6.3/include/common/time.h 
create mode 100644 haproxy-1.6.3/include/common/tools.h create mode 100644 haproxy-1.6.3/include/common/uri_auth.h create mode 100644 haproxy-1.6.3/include/common/version.h create mode 100644 haproxy-1.6.3/include/import/51d.h create mode 100644 haproxy-1.6.3/include/import/da.h create mode 100644 haproxy-1.6.3/include/import/lru.h create mode 100644 haproxy-1.6.3/include/import/xxhash.h create mode 100644 haproxy-1.6.3/include/proto/acl.h create mode 100644 haproxy-1.6.3/include/proto/action.h create mode 100644 haproxy-1.6.3/include/proto/applet.h create mode 100644 haproxy-1.6.3/include/proto/arg.h create mode 100644 haproxy-1.6.3/include/proto/auth.h create mode 100644 haproxy-1.6.3/include/proto/backend.h create mode 100644 haproxy-1.6.3/include/proto/channel.h create mode 100644 haproxy-1.6.3/include/proto/checks.h create mode 100644 haproxy-1.6.3/include/proto/compression.h create mode 100644 haproxy-1.6.3/include/proto/connection.h create mode 100644 haproxy-1.6.3/include/proto/dns.h create mode 100644 haproxy-1.6.3/include/proto/dumpstats.h create mode 100644 haproxy-1.6.3/include/proto/fd.h create mode 100644 haproxy-1.6.3/include/proto/freq_ctr.h create mode 100644 haproxy-1.6.3/include/proto/frontend.h create mode 100644 haproxy-1.6.3/include/proto/hdr_idx.h create mode 100644 haproxy-1.6.3/include/proto/hlua.h create mode 100644 haproxy-1.6.3/include/proto/lb_chash.h create mode 100644 haproxy-1.6.3/include/proto/lb_fas.h create mode 100644 haproxy-1.6.3/include/proto/lb_fwlc.h create mode 100644 haproxy-1.6.3/include/proto/lb_fwrr.h create mode 100644 haproxy-1.6.3/include/proto/lb_map.h create mode 100644 haproxy-1.6.3/include/proto/listener.h create mode 100644 haproxy-1.6.3/include/proto/log.h create mode 100644 haproxy-1.6.3/include/proto/map.h create mode 100644 haproxy-1.6.3/include/proto/obj_type.h create mode 100644 haproxy-1.6.3/include/proto/pattern.h create mode 100644 haproxy-1.6.3/include/proto/payload.h create mode 100644 
haproxy-1.6.3/include/proto/peers.h create mode 100644 haproxy-1.6.3/include/proto/pipe.h create mode 100644 haproxy-1.6.3/include/proto/port_range.h create mode 100644 haproxy-1.6.3/include/proto/proto_http.h create mode 100644 haproxy-1.6.3/include/proto/proto_tcp.h create mode 100644 haproxy-1.6.3/include/proto/proto_udp.h create mode 100644 haproxy-1.6.3/include/proto/proto_uxst.h create mode 100644 haproxy-1.6.3/include/proto/protocol.h create mode 100644 haproxy-1.6.3/include/proto/proxy.h create mode 100644 haproxy-1.6.3/include/proto/queue.h create mode 100644 haproxy-1.6.3/include/proto/raw_sock.h create mode 100644 haproxy-1.6.3/include/proto/sample.h create mode 100644 haproxy-1.6.3/include/proto/server.h create mode 100644 haproxy-1.6.3/include/proto/session.h create mode 100644 haproxy-1.6.3/include/proto/shctx.h create mode 100644 haproxy-1.6.3/include/proto/signal.h create mode 100644 haproxy-1.6.3/include/proto/ssl_sock.h create mode 100644 haproxy-1.6.3/include/proto/stick_table.h create mode 100644 haproxy-1.6.3/include/proto/stream.h create mode 100644 haproxy-1.6.3/include/proto/stream_interface.h create mode 100644 haproxy-1.6.3/include/proto/task.h create mode 100644 haproxy-1.6.3/include/proto/template.h create mode 100644 haproxy-1.6.3/include/proto/vars.h create mode 100644 haproxy-1.6.3/include/types/acl.h create mode 100644 haproxy-1.6.3/include/types/action.h create mode 100644 haproxy-1.6.3/include/types/applet.h create mode 100644 haproxy-1.6.3/include/types/arg.h create mode 100644 haproxy-1.6.3/include/types/auth.h create mode 100644 haproxy-1.6.3/include/types/backend.h create mode 100644 haproxy-1.6.3/include/types/capture.h create mode 100644 haproxy-1.6.3/include/types/channel.h create mode 100644 haproxy-1.6.3/include/types/checks.h create mode 100644 haproxy-1.6.3/include/types/compression.h create mode 100644 haproxy-1.6.3/include/types/connection.h create mode 100644 haproxy-1.6.3/include/types/counters.h create mode 100644 
haproxy-1.6.3/include/types/dns.h create mode 100644 haproxy-1.6.3/include/types/fd.h create mode 100644 haproxy-1.6.3/include/types/freq_ctr.h create mode 100644 haproxy-1.6.3/include/types/global.h create mode 100644 haproxy-1.6.3/include/types/hdr_idx.h create mode 100644 haproxy-1.6.3/include/types/hlua.h create mode 100644 haproxy-1.6.3/include/types/lb_chash.h create mode 100644 haproxy-1.6.3/include/types/lb_fas.h create mode 100644 haproxy-1.6.3/include/types/lb_fwlc.h create mode 100644 haproxy-1.6.3/include/types/lb_fwrr.h create mode 100644 haproxy-1.6.3/include/types/lb_map.h create mode 100644 haproxy-1.6.3/include/types/listener.h create mode 100644 haproxy-1.6.3/include/types/log.h create mode 100644 haproxy-1.6.3/include/types/mailers.h create mode 100644 haproxy-1.6.3/include/types/map.h create mode 100644 haproxy-1.6.3/include/types/obj_type.h create mode 100644 haproxy-1.6.3/include/types/pattern.h create mode 100644 haproxy-1.6.3/include/types/peers.h create mode 100644 haproxy-1.6.3/include/types/pipe.h create mode 100644 haproxy-1.6.3/include/types/port_range.h create mode 100644 haproxy-1.6.3/include/types/proto_http.h create mode 100644 haproxy-1.6.3/include/types/proto_udp.h create mode 100644 haproxy-1.6.3/include/types/protocol.h create mode 100644 haproxy-1.6.3/include/types/proxy.h create mode 100644 haproxy-1.6.3/include/types/queue.h create mode 100644 haproxy-1.6.3/include/types/sample.h create mode 100644 haproxy-1.6.3/include/types/server.h create mode 100644 haproxy-1.6.3/include/types/session.h create mode 100644 haproxy-1.6.3/include/types/signal.h create mode 100644 haproxy-1.6.3/include/types/ssl_sock.h create mode 100644 haproxy-1.6.3/include/types/stick_table.h create mode 100644 haproxy-1.6.3/include/types/stream.h create mode 100644 haproxy-1.6.3/include/types/stream_interface.h create mode 100644 haproxy-1.6.3/include/types/task.h create mode 100644 haproxy-1.6.3/include/types/template.h create mode 100644 
haproxy-1.6.3/include/types/vars.h create mode 100644 haproxy-1.6.3/src/51d.c create mode 100644 haproxy-1.6.3/src/acl.c create mode 100644 haproxy-1.6.3/src/applet.c create mode 100644 haproxy-1.6.3/src/arg.c create mode 100644 haproxy-1.6.3/src/auth.c create mode 100644 haproxy-1.6.3/src/backend.c create mode 100644 haproxy-1.6.3/src/base64.c create mode 100644 haproxy-1.6.3/src/buffer.c create mode 100644 haproxy-1.6.3/src/cfgparse.c create mode 100644 haproxy-1.6.3/src/channel.c create mode 100644 haproxy-1.6.3/src/checks.c create mode 100644 haproxy-1.6.3/src/chunk.c create mode 100644 haproxy-1.6.3/src/compression.c create mode 100644 haproxy-1.6.3/src/connection.c create mode 100644 haproxy-1.6.3/src/da.c create mode 100644 haproxy-1.6.3/src/dns.c create mode 100644 haproxy-1.6.3/src/dumpstats.c create mode 100644 haproxy-1.6.3/src/ev_epoll.c create mode 100644 haproxy-1.6.3/src/ev_kqueue.c create mode 100644 haproxy-1.6.3/src/ev_poll.c create mode 100644 haproxy-1.6.3/src/ev_select.c create mode 100644 haproxy-1.6.3/src/fd.c create mode 100644 haproxy-1.6.3/src/freq_ctr.c create mode 100644 haproxy-1.6.3/src/frontend.c create mode 100644 haproxy-1.6.3/src/haproxy-systemd-wrapper.c create mode 100644 haproxy-1.6.3/src/haproxy.c create mode 100644 haproxy-1.6.3/src/hash.c create mode 100644 haproxy-1.6.3/src/hdr_idx.c create mode 100644 haproxy-1.6.3/src/hlua.c create mode 100644 haproxy-1.6.3/src/i386-linux-vsys.c create mode 100644 haproxy-1.6.3/src/lb_chash.c create mode 100644 haproxy-1.6.3/src/lb_fas.c create mode 100644 haproxy-1.6.3/src/lb_fwlc.c create mode 100644 haproxy-1.6.3/src/lb_fwrr.c create mode 100644 haproxy-1.6.3/src/lb_map.c create mode 100644 haproxy-1.6.3/src/listener.c create mode 100644 haproxy-1.6.3/src/log.c create mode 100644 haproxy-1.6.3/src/lru.c create mode 100644 haproxy-1.6.3/src/mailers.c create mode 100644 haproxy-1.6.3/src/map.c create mode 100644 haproxy-1.6.3/src/memory.c create mode 100644 haproxy-1.6.3/src/namespace.c 
create mode 100644 haproxy-1.6.3/src/pattern.c create mode 100644 haproxy-1.6.3/src/payload.c create mode 100644 haproxy-1.6.3/src/peers.c create mode 100644 haproxy-1.6.3/src/pipe.c create mode 100644 haproxy-1.6.3/src/proto_http.c create mode 100644 haproxy-1.6.3/src/proto_tcp.c create mode 100644 haproxy-1.6.3/src/proto_udp.c create mode 100644 haproxy-1.6.3/src/proto_uxst.c create mode 100644 haproxy-1.6.3/src/protocol.c create mode 100644 haproxy-1.6.3/src/proxy.c create mode 100644 haproxy-1.6.3/src/queue.c create mode 100644 haproxy-1.6.3/src/raw_sock.c create mode 100644 haproxy-1.6.3/src/rbtree.c create mode 100644 haproxy-1.6.3/src/regex.c create mode 100644 haproxy-1.6.3/src/sample.c create mode 100644 haproxy-1.6.3/src/server.c create mode 100644 haproxy-1.6.3/src/session.c create mode 100644 haproxy-1.6.3/src/shctx.c create mode 100644 haproxy-1.6.3/src/signal.c create mode 100644 haproxy-1.6.3/src/ssl_sock.c create mode 100644 haproxy-1.6.3/src/standard.c create mode 100644 haproxy-1.6.3/src/stick_table.c create mode 100644 haproxy-1.6.3/src/stream.c create mode 100644 haproxy-1.6.3/src/stream_interface.c create mode 100644 haproxy-1.6.3/src/task.c create mode 100644 haproxy-1.6.3/src/time.c create mode 100644 haproxy-1.6.3/src/trace.c create mode 100644 haproxy-1.6.3/src/uri_auth.c create mode 100644 haproxy-1.6.3/src/vars.c create mode 100644 haproxy-1.6.3/src/xxhash.c create mode 100644 haproxy-1.6.3/tests/0000-debug-stats.diff create mode 100644 haproxy-1.6.3/tests/filltab25.c create mode 100644 haproxy-1.6.3/tests/hash_results.txt create mode 100644 haproxy-1.6.3/tests/hashing-results.txt create mode 100644 haproxy-1.6.3/tests/io_limits.txt create mode 100644 haproxy-1.6.3/tests/ip-hash.c create mode 100644 haproxy-1.6.3/tests/reset.c create mode 100644 haproxy-1.6.3/tests/sockstat.txt create mode 100644 haproxy-1.6.3/tests/test-acl-args.cfg create mode 100644 haproxy-1.6.3/tests/test-address-syntax.cfg create mode 100644 
haproxy-1.6.3/tests/test-arg.c create mode 100644 haproxy-1.6.3/tests/test-backlog.cfg create mode 100644 haproxy-1.6.3/tests/test-check-expect.cfg create mode 100644 haproxy-1.6.3/tests/test-connection.cfg create mode 100644 haproxy-1.6.3/tests/test-cookie-indirect.cfg create mode 100644 haproxy-1.6.3/tests/test-cookie-insert.cfg create mode 100644 haproxy-1.6.3/tests/test-cookie-passive.cfg create mode 100644 haproxy-1.6.3/tests/test-cookie-prefix.cfg create mode 100644 haproxy-1.6.3/tests/test-cookie-rewrite.cfg create mode 100644 haproxy-1.6.3/tests/test-disable-404.cfg create mode 100644 haproxy-1.6.3/tests/test-fsm.cfg create mode 100644 haproxy-1.6.3/tests/test-fwlc.cfg create mode 100644 haproxy-1.6.3/tests/test-fwrr.cfg create mode 100644 haproxy-1.6.3/tests/test-http-send-name-hdr.cfg create mode 100644 haproxy-1.6.3/tests/test-inspect-smtp.cfg create mode 100644 haproxy-1.6.3/tests/test-inspect-ssl.cfg create mode 100644 haproxy-1.6.3/tests/test-map-ports.cfg create mode 100644 haproxy-1.6.3/tests/test-pollers.cfg create mode 100644 haproxy-1.6.3/tests/test-redirect.cfg create mode 100644 haproxy-1.6.3/tests/test-sample-fetch-args.cfg create mode 100644 haproxy-1.6.3/tests/test-sample-fetch-conv.cfg create mode 100644 haproxy-1.6.3/tests/test-sql.cfg create mode 100644 haproxy-1.6.3/tests/test-str2sa.cfg create mode 100644 haproxy-1.6.3/tests/test-time.cfg create mode 100644 haproxy-1.6.3/tests/test-timeout.cfg create mode 100644 haproxy-1.6.3/tests/test-url-hash.cfg create mode 100644 haproxy-1.6.3/tests/test-valid-names.cfg create mode 100644 haproxy-1.6.3/tests/test.c create mode 100644 haproxy-1.6.3/tests/test_hashes.c create mode 100644 haproxy-1.6.3/tests/test_pools.c create mode 100644 haproxy-1.6.3/tests/testinet.c create mode 100644 haproxy-1.6.3/tests/uri_hash.c diff --git a/debian/NEWS b/debian/NEWS new file mode 100644 index 0000000..104dadc --- /dev/null +++ b/debian/NEWS @@ -0,0 +1,26 @@ +haproxy (1.4.23-1) unstable; urgency=low + + As of 
1.4.23-1, the Debian package ships an rsyslog snippet to allow logging + via /dev/log from chrooted HAProxy processes. If you are using rsyslog, you + should restart rsyslog after installing this package to enable HAProxy to log + via rsyslog. See /usr/share/doc/haproxy/README.Debian for more details. + + Also note that as of 1.4.23-1, chrooting the HAProxy process is enabled in the + default Debian configuration. + + -- Apollon Oikonomopoulos Thu, 25 Apr 2013 23:26:35 +0300 + +haproxy (1.4.13-1) unstable; urgency=low + + Maintainer of this package has changed. + + -- Christo Buschek Mon, 10 Mar 2011 22:07:10 +0100 + +haproxy (1.3.14.2-1) unstable; urgency=low + + Configuration has moved to /etc/haproxy/haproxy.cfg. This allows to add the + configurable /etc/haproxy/errors directory. + The haproxy binary was also moved to /usr/sbin rather than /usr/bin, update + your init script or reinstall the one provided with the package. + + -- Arnaud Cornet Mon, 21 Jan 2008 23:38:15 +0100 diff --git a/debian/README.Debian b/debian/README.Debian new file mode 100644 index 0000000..0de422b --- /dev/null +++ b/debian/README.Debian @@ -0,0 +1,22 @@ +Binding non-local IPv6 addresses +================================ + +There are cases where HAProxy needs to bind() a non-existing address, like +for example in high-availability setups with floating IP addresses (e.g. using +keepalived or ucarp). For IPv4 the net.ipv4.ip_nonlocal_bind sysctl can be used +to permit binding non-existing addresses, such a control does not exist for +IPv6 however. + +The solution is to add the "transparent" parameter to the frontend's bind +statement, for example: + +frontend fe1 + bind 2001:db8:abcd:f00::1:8080 transparent + +This will require a recent Linux kernel (>= 2.6.28) with TPROXY support (Debian +kernels will work correctly with this option). + +See /usr/share/doc/haproxy/configuration.txt.gz for more information on the +"transparent" bind parameter. 
+ + -- Apollon Oikonomopoulos Wed, 16 Oct 2013 21:18:58 +0300 diff --git a/debian/changelog b/debian/changelog new file mode 100644 index 0000000..47e8a6a --- /dev/null +++ b/debian/changelog @@ -0,0 +1,874 @@ +haproxy (1.6.3-1~u16.04+mos1) mos10.0; urgency=medium + + * Add MIRA0001-Adding-include-configuration-statement-to-haproxy.patch + + -- Dmitry Teselkin Fri, 17 Jun 2016 15:28:32 +0000 + +haproxy (1.6.3-1) unstable; urgency=medium + + [ Apollon Oikonomopoulos ] + * haproxy.init: use s-s-d's --pidfile option. + Thanks to Louis Bouchard (Closes: 804530) + + [ Vincent Bernat ] + * watch: fix d/watch to look for 1.6 version + * Imported Upstream version 1.6.3 + + -- Vincent Bernat Thu, 31 Dec 2015 08:10:10 +0100 + +haproxy (1.6.2-2) unstable; urgency=medium + + * Enable USE_REGPARM on amd64 as well. + + -- Vincent Bernat Tue, 03 Nov 2015 21:21:30 +0100 + +haproxy (1.6.2-1) unstable; urgency=medium + + * New upstream release. + - BUG/MAJOR: dns: first DNS response packet not matching queried + hostname may lead to a loop + - BUG/MAJOR: http: don't requeue an idle connection that is already + queued + * Upload to unstable. + + -- Vincent Bernat Tue, 03 Nov 2015 13:36:22 +0100 + +haproxy (1.6.1-2) experimental; urgency=medium + + * Build the Lua manpage in -arch, fixes FTBFS in binary-only builds. + + -- Apollon Oikonomopoulos Thu, 22 Oct 2015 12:19:41 +0300 + +haproxy (1.6.1-1) experimental; urgency=medium + + [ Vincent Bernat ] + * New upstream release. + - BUG/MAJOR: ssl: free the generated SSL_CTX if the LRU cache is + disabled + * Drop 0001-BUILD-install-only-relevant-and-existing-documentati.patch. + + [ Apollon Oikonomopoulos ] + * Ship and generate Lua API documentation. + + -- Vincent Bernat Thu, 22 Oct 2015 10:45:55 +0200 + +haproxy (1.6.0+ds1-1) experimental; urgency=medium + + * New upstream release! 
+ * Add a patch to fix documentation installation: + + 0001-BUILD-install-only-relevant-and-existing-documentati.patch + * Update HAProxy documentation converter to a more recent version. + + -- Vincent Bernat Wed, 14 Oct 2015 17:29:19 +0200 + +haproxy (1.6~dev7-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Tue, 06 Oct 2015 16:01:26 +0200 + +haproxy (1.6~dev5-1) experimental; urgency=medium + + * New upstream release. + + -- Vincent Bernat Mon, 14 Sep 2015 15:50:28 +0200 + +haproxy (1.6~dev4-1) experimental; urgency=medium + + * New upstream release. + * Refresh debian/copyright. + + -- Vincent Bernat Sun, 30 Aug 2015 23:54:10 +0200 + +haproxy (1.6~dev3-1) experimental; urgency=medium + + * New upstream release. + * Enable Lua support. + + -- Vincent Bernat Sat, 15 Aug 2015 17:51:29 +0200 + +haproxy (1.5.15-1) unstable; urgency=medium + + * New upstream stable release including the following fix: + - BUG/MAJOR: http: don't call http_send_name_header() after an error + + -- Vincent Bernat Mon, 02 Nov 2015 07:34:19 +0100 + +haproxy (1.5.14-1) unstable; urgency=high + + * New upstream version. Fix an information leak (CVE-2015-3281): + - BUG/MAJOR: buffers: make the buffer_slow_realign() function + respect output data. + * Add $named as a dependency for init script. Closes: #790638. 
+ + -- Vincent Bernat Fri, 03 Jul 2015 19:49:02 +0200 + +haproxy (1.5.13-1) unstable; urgency=medium + + * New upstream stable release including the following fixes: + - MAJOR: peers: allow peers section to be used with nbproc > 1 + - BUG/MAJOR: checks: always check for end of list before proceeding + - MEDIUM: ssl: replace standards DH groups with custom ones + - BUG/MEDIUM: ssl: fix tune.ssl.default-dh-param value being overwritten + - BUG/MEDIUM: cfgparse: segfault when userlist is misused + - BUG/MEDIUM: stats: properly initialize the scope before dumping stats + - BUG/MEDIUM: http: don't forward client shutdown without NOLINGER + except for tunnels + - BUG/MEDIUM: checks: do not dereference head of a tcp-check at the end + - BUG/MEDIUM: checks: do not dereference a list as a tcpcheck struct + - BUG/MEDIUM: peers: apply a random reconnection timeout + - BUG/MEDIUM: config: properly compute the default number of processes + for a proxy + + -- Vincent Bernat Sat, 27 Jun 2015 20:52:07 +0200 + +haproxy (1.5.12-1) unstable; urgency=medium + + * New upstream stable release including the following fixes: + - BUG/MAJOR: http: don't read past buffer's end in http_replace_value + - BUG/MAJOR: http: prevent risk of reading past end with balance + url_param + - BUG/MEDIUM: Do not consider an agent check as failed on L7 error + - BUG/MEDIUM: patern: some entries are not deleted with case + insensitive match + - BUG/MEDIUM: buffer: one byte miss in buffer free space check + - BUG/MEDIUM: http: thefunction "(req|res)-replace-value" doesn't + respect the HTTP syntax + - BUG/MEDIUM: peers: correctly configure the client timeout + - BUG/MEDIUM: http: hdr_cnt would not count any header when called + without name + - BUG/MEDIUM: listener: don't report an error when resuming unbound + listeners + - BUG/MEDIUM: init: don't limit cpu-map to the first 32 processes only + - BUG/MEDIUM: stream-int: always reset si->ops when si->end is + nullified + - BUG/MEDIUM: http: remove 
content-length from chunked messages + - BUG/MEDIUM: http: do not restrict parsing of transfer-encoding to + HTTP/1.1 + - BUG/MEDIUM: http: incorrect transfer-coding in the request is a bad + request + - BUG/MEDIUM: http: remove content-length form responses with bad + transfer-encoding + - BUG/MEDIUM: http: wait for the exact amount of body bytes in + wait_for_request_body + + -- Vincent Bernat Sat, 02 May 2015 16:38:28 +0200 + +haproxy (1.5.11-2) unstable; urgency=medium + + * Upload to unstable. + + -- Vincent Bernat Sun, 26 Apr 2015 17:46:58 +0200 + +haproxy (1.5.11-1) experimental; urgency=medium + + * New upstream stable release including the following fixes: + - BUG/MAJOR: log: don't try to emit a log if no logger is set + - BUG/MEDIUM: backend: correctly detect the domain when + use_domain_only is used + - BUG/MEDIUM: Do not set agent health to zero if server is disabled + in config + - BUG/MEDIUM: Only explicitly report "DOWN (agent)" if the agent health + is zero + - BUG/MEDIUM: http: fix header removal when previous header ends with + pure LF + - BUG/MEDIUM: channel: fix possible integer overflow on reserved size + computation + - BUG/MEDIUM: channel: don't schedule data in transit for leaving until + connected + - BUG/MEDIUM: http: make http-request set-header compute the string + before removal + * Upload to experimental. 
+ + -- Vincent Bernat Sun, 01 Feb 2015 09:22:27 +0100 + +haproxy (1.5.10-1) experimental; urgency=medium + + * New upstream stable release including the following fixes: + - BUG/MAJOR: stream-int: properly check the memory allocation return + - BUG/MEDIUM: sample: fix random number upper-bound + - BUG/MEDIUM: patterns: previous fix was incomplete + - BUG/MEDIUM: payload: ensure that a request channel is available + - BUG/MEDIUM: tcp-check: don't rely on random memory contents + - BUG/MEDIUM: tcp-checks: disable quick-ack unless next rule is an expect + - BUG/MEDIUM: config: do not propagate processes between stopped + processes + - BUG/MEDIUM: memory: fix freeing logic in pool_gc2() + - BUG/MEDIUM: compression: correctly report zlib_mem + * Upload to experimental. + + -- Vincent Bernat Sun, 04 Jan 2015 13:17:56 +0100 + +haproxy (1.5.9-1) experimental; urgency=medium + + * New upstream stable release including the following fixes: + - BUG/MAJOR: sessions: unlink session from list on out + of memory + - BUG/MEDIUM: pattern: don't load more than once a pattern + list. + - BUG/MEDIUM: connection: sanitize PPv2 header length before + parsing address information + - BUG/MAJOR: frontend: initialize capture pointers earlier + - BUG/MEDIUM: checks: fix conflicts between agent checks and + ssl healthchecks + - BUG/MEDIUM: ssl: force a full GC in case of memory shortage + - BUG/MEDIUM: ssl: fix bad ssl context init can cause + segfault in case of OOM. + * Upload to experimental. + + -- Vincent Bernat Sun, 07 Dec 2014 16:37:36 +0100 + +haproxy (1.5.8-3) unstable; urgency=medium + + * Remove RC4 from the default cipher string shipped in configuration. + + -- Vincent Bernat Fri, 27 Feb 2015 11:29:23 +0100 + +haproxy (1.5.8-2) unstable; urgency=medium + + * Cherry-pick the following patches from 1.5.9 release: + - 8a0b93bde77e BUG/MAJOR: sessions: unlink session from list on out + of memory + - bae03eaad40a BUG/MEDIUM: pattern: don't load more than once a pattern + list. 
+ - 93637b6e8503 BUG/MEDIUM: connection: sanitize PPv2 header length before + parsing address information + - 8ba50128832b BUG/MAJOR: frontend: initialize capture pointers earlier + - 1f96a87c4e14 BUG/MEDIUM: checks: fix conflicts between agent checks and + ssl healthchecks + - 9bcc01ae2598 BUG/MEDIUM: ssl: force a full GC in case of memory shortage + - 909514970089 BUG/MEDIUM: ssl: fix bad ssl context init can cause + segfault in case of OOM. + * Cherry-pick the following patches from future 1.5.10 release: + - 1e89acb6be9b BUG/MEDIUM: payload: ensure that a request channel is + available + - bad3c6f1b6d7 BUG/MEDIUM: patterns: previous fix was incomplete + + -- Vincent Bernat Sun, 07 Dec 2014 11:11:21 +0100 + +haproxy (1.5.8-1) unstable; urgency=medium + + * New upstream stable release including the following fixes: + + + BUG/MAJOR: buffer: check the space left is enough or not when input + data in a buffer is wrapped + + BUG/MINOR: ssl: correctly initialize ssl ctx for invalid certificates + + BUG/MEDIUM: tcp: don't use SO_ORIGINAL_DST on non-AF_INET sockets + + BUG/MEDIUM: regex: fix pcre_study error handling + + BUG/MEDIUM: tcp: fix outgoing polling based on proxy protocol + + BUG/MINOR: log: fix request flags when keep-alive is enabled + + BUG/MAJOR: cli: explicitly call cli_release_handler() upon error + + BUG/MEDIUM: http: don't dump debug headers on MSG_ERROR + * Also includes the following new features: + + MINOR: ssl: add statement to force some ssl options in global. + + MINOR: ssl: add fetchs 'ssl_c_der' and 'ssl_f_der' to return DER + formatted certs + * Disable SSLv3 in the default configuration file. 
+ + -- Vincent Bernat Fri, 31 Oct 2014 13:48:19 +0100 + +haproxy (1.5.6-1) unstable; urgency=medium + + * New upstream stable release including the following fixes: + + BUG/MEDIUM: systemd: set KillMode to 'mixed' + + MINOR: systemd: Check configuration before start + + BUG/MEDIUM: config: avoid skipping disabled proxies + + BUG/MINOR: config: do not accept more track-sc than configured + + BUG/MEDIUM: backend: fix URI hash when a query string is present + * Drop systemd patches: + + haproxy.service-also-check-on-start.patch + + haproxy.service-set-killmode-to-mixed.patch + * Refresh other patches. + + -- Vincent Bernat Mon, 20 Oct 2014 18:10:21 +0200 + +haproxy (1.5.5-1) unstable; urgency=medium + + [ Vincent Bernat ] + * initscript: use start-stop-daemon to reliably terminate all haproxy + processes. Also treat stopping a non-running haproxy as success. + (Closes: #762608, LP: #1038139) + + [ Apollon Oikonomopoulos ] + * New upstream stable release including the following fixes: + + DOC: Address issue where documentation is excluded due to a gitignore + rule. + + MEDIUM: Improve signal handling in systemd wrapper. + + BUG/MINOR: config: don't propagate process binding for dynamic + use_backend + + MINOR: Also accept SIGHUP/SIGTERM in systemd-wrapper + + DOC: clearly state that the "show sess" output format is not fixed + + MINOR: stats: fix minor typo fix in stats_dump_errors_to_buffer() + + DOC: indicate in the doc that track-sc* can wait if data are missing + + MEDIUM: http: enable header manipulation for 101 responses + + BUG/MEDIUM: config: propagate frontend to backend process binding again. 
+ + MEDIUM: config: properly propagate process binding between proxies + + MEDIUM: config: make the frontends automatically bind to the listeners' + processes + + MEDIUM: config: compute the exact bind-process before listener's + maxaccept + + MEDIUM: config: only warn if stats are attached to multi-process bind + directives + + MEDIUM: config: report it when tcp-request rules are misplaced + + MINOR: config: detect the case where a tcp-request content rule has no + inspect-delay + + MEDIUM: systemd-wrapper: support multiple executable versions and names + + BUG/MEDIUM: remove debugging code from systemd-wrapper + + BUG/MEDIUM: http: adjust close mode when switching to backend + + BUG/MINOR: config: don't propagate process binding on fatal errors. + + BUG/MEDIUM: check: rule-less tcp-check must detect connect failures + + BUG/MINOR: tcp-check: report the correct failed step in the status + + DOC: indicate that weight zero is reported as DRAIN + * Add a new patch (haproxy.service-set-killmode-to-mixed.patch) to fix the + systemctl stop action conflicting with the systemd wrapper now catching + SIGTERM. + * Bump standards to 3.9.6; no changes needed. + * haproxy-doc: link to tracker.debian.org instead of packages.qa.debian.org. + * d/copyright: move debian/dconv/* paragraph after debian/*, so that it + actually matches the files it is supposed to. + + -- Apollon Oikonomopoulos Wed, 08 Oct 2014 12:34:53 +0300 + +haproxy (1.5.4-1) unstable; urgency=high + + * New upstream version. + + Fix a critical bug that, under certain unlikely conditions, allows a + client to crash haproxy. + * Prefix rsyslog configuration file to ensure to log only to + /var/log/haproxy. Thanks to Paul Bourke for the patch. 
+ + -- Vincent Bernat Tue, 02 Sep 2014 19:14:38 +0200 + +haproxy (1.5.3-1) unstable; urgency=medium + + * New upstream stable release, fixing the following issues: + + Memory corruption when building a proxy protocol v2 header + + Memory leak in SSL DHE key exchange + + -- Apollon Oikonomopoulos Fri, 25 Jul 2014 10:41:36 +0300 + +haproxy (1.5.2-1) unstable; urgency=medium + + * New upstream stable release. Important fixes: + + A few sample fetch functions when combined in certain ways would return + malformed results, possibly crashing the HAProxy process. + + Hash-based load balancing and http-send-name-header would fail for + requests which contain a body which starts to be forwarded before the + data is used. + + -- Apollon Oikonomopoulos Mon, 14 Jul 2014 00:42:32 +0300 + +haproxy (1.5.1-1) unstable; urgency=medium + + * New upstream stable release: + + Fix a file descriptor leak for clients that disappear before connecting. + + Do not staple expired OCSP responses. + + -- Apollon Oikonomopoulos Tue, 24 Jun 2014 12:56:30 +0300 + +haproxy (1.5.0-1) unstable; urgency=medium + + * New upstream stable series. Notable changes since the 1.4 series: + + Native SSL support on both sides with SNI/NPN/ALPN and OCSP stapling. + + IPv6 and UNIX sockets are supported everywhere + + End-to-end HTTP keep-alive for better support of NTLM and improved + efficiency in static farms + + HTTP/1.1 response compression (deflate, gzip) to save bandwidth + + PROXY protocol versions 1 and 2 on both sides + + Data sampling on everything in request or response, including payload + + ACLs can use any matching method with any input sample + + Maps and dynamic ACLs updatable from the CLI + + Stick-tables support counters to track activity on any input sample + + Custom format for logs, unique-id, header rewriting, and redirects + + Improved health checks (SSL, scripted TCP, check agent, ...) 
+ + Much more scalable configuration supports hundreds of thousands of + backends and certificates without sweating + + * Upload to unstable, merge all 1.5 work from experimental. Most important + packaging changes since 1.4.25-1 include: + + systemd support. + + A more sane default config file. + + Zero-downtime upgrades between 1.5 releases by gracefully reloading + HAProxy during upgrades. + + HTML documentation shipped in the haproxy-doc package. + + kqueue support for kfreebsd. + + * Packaging changes since 1.5~dev26-2: + + Drop patches merged upstream: + o Fix-reference-location-in-manpage.patch + o 0001-BUILD-stats-workaround-stupid-and-bogus-Werror-forma.patch + + d/watch: look for stable 1.5 releases + + systemd: respect CONFIG and EXTRAOPTS when specified in + /etc/default/haproxy. + + initscript: test the configuration before start or reload. + + initscript: remove the ENABLED flag and logic. + + -- Apollon Oikonomopoulos Fri, 20 Jun 2014 11:05:17 +0300 + +haproxy (1.5~dev26-2) experimental; urgency=medium + + * initscript: start should not fail when haproxy is already running + + Fixes upgrades from post-1.5~dev24-1 installations + + -- Apollon Oikonomopoulos Wed, 04 Jun 2014 13:20:39 +0300 + +haproxy (1.5~dev26-1) experimental; urgency=medium + + * New upstream development version. + + Add a patch to fix compilation with -Werror=format-security + + -- Vincent Bernat Wed, 28 May 2014 20:32:10 +0200 + +haproxy (1.5~dev25-1) experimental; urgency=medium + + [ Vincent Bernat ] + * New upstream development version. + * Rename "contimeout", "clitimeout" and "srvtimeout" in the default + configuration file to "timeout connection", "timeout client" and + "timeout server". + + [ Apollon Oikonomopoulos ] + * Build on kfreebsd using the "freebsd" target; enables kqueue support. 
+ + -- Vincent Bernat Thu, 15 May 2014 00:20:11 +0200 + +haproxy (1.5~dev24-2) experimental; urgency=medium + + * New binary package: haproxy-doc + + Contains the HTML documentation built using a version of Cyril Bonté's + haproxy-dconv (https://github.com/cbonte/haproxy-dconv). + + Add Build-Depends-Indep on python and python-mako + + haproxy Suggests: haproxy-doc + * systemd: check config file for validity on reload. + * haproxy.cfg: + + Enable the stats socket by default and bind it to + /run/haproxy/admin.sock, which is accessible by the haproxy group. + /run/haproxy creation is handled by the initscript for sysv-rc and a + tmpfiles.d config for systemd. + + Set the default locations for CA and server certificates to + /etc/ssl/certs and /etc/ssl/private respectively. + + Set the default cipher list to be used on listening SSL sockets to + enable PFS, preferring ECDHE ciphers by default. + * Gracefully reload HAProxy on upgrade instead of performing a full restart. + * debian/rules: split build into binary-arch and binary-indep. + * Build-depend on debhelper >= 9, set compat to 9. + + -- Apollon Oikonomopoulos Sun, 27 Apr 2014 13:37:17 +0300 + +haproxy (1.5~dev24-1) experimental; urgency=medium + + * New upstream development version, fixes major regressions introduced in + 1.5~dev23: + + + Forwarding of a message body (request or response) would automatically + stop after the transfer timeout strikes, and with no error. + + Redirects failed to update the msg->next offset after consuming the + request, so if they were made with keep-alive enabled and starting with + a slash (relative location), then the buffer was shifted by a negative + amount of data, causing a crash. + + The code to standardize DH parameters caused an important performance + regression, so it was temporarily reverted for the time needed to + understand the cause and to fix it. 
+ + For a complete release announcement, including other bugfixes and feature + enhancements, see http://deb.li/yBVA. + + -- Apollon Oikonomopoulos Sun, 27 Apr 2014 11:09:37 +0300 + +haproxy (1.5~dev23-1) experimental; urgency=medium + + * New upstream development version; notable changes since 1.5~dev22: + + SSL record size optimizations to speed up both small and large + transfers. + + Dynamic backend name support in use_backend. + + Compressed chunked transfer encoding support. + + Dynamic ACL manipulation via the CLI. + + New "language" converter for extracting language preferences from + Accept-Language headers. + * Remove halog source and systemd unit files from + /usr/share/doc/haproxy/contrib, they are built and shipped in their + appropriate locations since 1.5~dev19-2. + + -- Apollon Oikonomopoulos Wed, 23 Apr 2014 11:12:34 +0300 + +haproxy (1.5~dev22-1) experimental; urgency=medium + + * New upstream development version + * watch: use the source page and not the main one + + -- Apollon Oikonomopoulos Mon, 03 Feb 2014 17:45:51 +0200 + +haproxy (1.5~dev21+20140118-1) experimental; urgency=medium + + * New upstream development snapshot, with the following fixes since + 1.5-dev21: + + 00b0fb9 BUG/MAJOR: ssl: fix breakage caused by recent fix abf08d9 + + 410f810 BUG/MEDIUM: map: segmentation fault with the stats's socket + command "set map ..." 
+ + abf08d9 BUG/MAJOR: connection: fix mismatch between rcv_buf's API and + usage + + 35249cb BUG/MINOR: pattern: pattern comparison executed twice + + c920096 BUG/MINOR: http: don't clear the SI_FL_DONT_WAKE flag between + requests + + b800623 BUG/MEDIUM: stats: fix HTTP/1.0 breakage introduced in previous + patch + + 61f7f0a BUG/MINOR: stream-int: do not clear the owner upon unregister + + 983eb31 BUG/MINOR: channel: CHN_INFINITE_FORWARD must be unsigned + + a3ae932 BUG/MEDIUM: stats: the web interface must check the tracked + servers before enabling + + e24d963 BUG/MEDIUM: checks: unchecked servers could not be enabled + anymore + + 7257550 BUG/MINOR: http: always disable compression on HTTP/1.0 + + 9f708ab BUG/MINOR: checks: successful check completion must not + re-enable MAINT servers + + ff605db BUG/MEDIUM: backend: do not re-initialize the connection's + context upon reuse + + ea90063 BUG/MEDIUM: stream-int: fix the keep-alive idle connection + handler + * Update debian/copyright to reflect the license of ebtree/ + (closes: #732614) + * Synchronize debian/copyright with source + * Add Documentation field to the systemd unit file + + -- Apollon Oikonomopoulos Mon, 20 Jan 2014 10:07:34 +0200 + +haproxy (1.5~dev21-1) experimental; urgency=low + + [ Prach Pongpanich ] + * Bump Standards-Version to 3.9.5 + + [ Thomas Bechtold ] + * debian/control: Add haproxy-dbg binary package for debug symbols. + + [ Apollon Oikonomopoulos ] + * New upstream development version. + * Require syslog to be operational before starting. Closes: #726323. + + -- Vincent Bernat Tue, 17 Dec 2013 01:38:04 +0700 + +haproxy (1.5~dev19-2) experimental; urgency=low + + [ Vincent Bernat ] + * Really enable systemd support by using dh-systemd helper. + * Don't use -L/usr/lib and rely on default search path. Closes: #722777. + + [ Apollon Oikonomopoulos ] + * Ship halog. 
+ + -- Vincent Bernat Thu, 12 Sep 2013 21:58:05 +0200 + +haproxy (1.5~dev19-1) experimental; urgency=high + + [ Vincent Bernat ] + * New upstream version. + + CVE-2013-2175: fix a possible crash when using negative header + occurrences. + + Drop 0002-Fix-typo-in-src-haproxy.patch: applied upstream. + * Enable gzip compression feature. + + [ Prach Pongpanich ] + * Drop bashism patch. It seems useless to maintain a patch to convert + example scripts from /bin/bash to /bin/sh. + * Fix reload/restart action of init script (LP: #1187469) + + -- Vincent Bernat Mon, 17 Jun 2013 22:03:58 +0200 + +haproxy (1.5~dev18-1) experimental; urgency=low + + [ Apollon Oikonomopoulos ] + * New upstream development version + + [ Vincent Bernat ] + * Add support for systemd. Currently, /etc/default/haproxy is not used + when using systemd. + + -- Vincent Bernat Sun, 26 May 2013 12:33:00 +0200 + +haproxy (1.4.25-1) unstable; urgency=medium + + [ Prach Pongpanich ] + * New upstream version. + * Update watch file to use the source page. + * Bump Standards-Version to 3.9.5. + + [ Thomas Bechtold ] + * debian/control: Add haproxy-dbg binary package for debug symbols. + + [ Apollon Oikonomopoulos ] + * Require syslog to be operational before starting. Closes: #726323. + * Document how to bind non-local IPv6 addresses. + * Add a reference to configuration.txt.gz to the manpage. + * debian/copyright: synchronize with source. + + -- Prach Pongpanich Fri, 28 Mar 2014 09:35:09 +0700 + +haproxy (1.4.24-2) unstable; urgency=low + + [ Apollon Oikonomopoulos ] + * Ship contrib/halog as /usr/bin/halog. + + [ Vincent Bernat ] + * Don't use -L/usr/lib and rely on default search path. Closes: #722777. + + -- Vincent Bernat Sun, 15 Sep 2013 14:36:27 +0200 + +haproxy (1.4.24-1) unstable; urgency=high + + [ Vincent Bernat ] + * New upstream version. + + CVE-2013-2175: fix a possible crash when using negative header + occurrences. + + [ Prach Pongpanich ] + * Drop bashism patch. 
It seems useless to maintain a patch to convert + example scripts from /bin/bash to /bin/sh. + * Fix reload/restart action of init script (LP: #1187469). + + -- Vincent Bernat Mon, 17 Jun 2013 21:56:26 +0200 + +haproxy (1.4.23-1) unstable; urgency=low + + [ Apollon Oikonomopoulos ] + * New upstream version (Closes: #643650, #678953) + + This fixes CVE-2012-2942 (Closes: #674447) + + This fixes CVE-2013-1912 (Closes: #704611) + * Ship vim addon as vim-haproxy (Closes: #702893) + * Check for the configuration file after sourcing /etc/default/haproxy + (Closes: #641762) + * Use /dev/log for logging by default (Closes: #649085) + + [ Vincent Bernat ] + * debian/control: + + add Vcs-* fields + + switch maintenance to Debian HAProxy team. (Closes: #706890) + + drop dependency to quilt: 3.0 (quilt) format is in use. + * debian/rules: + + don't explicitly call dh_installchangelog. + + use dh_installdirs to install directories. + + use dh_install to install error and configuration files. + + switch to `linux2628` Makefile target for Linux. + * debian/postrm: + + remove haproxy user and group on purge. + * Ship a more minimal haproxy.cfg file: no `listen` blocks but `global` + and `defaults` block with appropriate configuration to use chroot and + logging in the expected way. 
+ + [ Prach Pongpanich ] + * debian/copyright: + + add missing copyright holders + + update years of copyright + * debian/rules: + + build with -Wl,--as-needed to get rid of unnecessary depends + * Remove useless files in debian/haproxy.{docs,examples} + * Update debian/watch file, thanks to Bart Martens + + -- Vincent Bernat Mon, 06 May 2013 20:02:14 +0200 + +haproxy (1.4.15-1) unstable; urgency=low + + * New upstream release with critical bug fix (Closes: #631351) + + -- Christo Buschek Thu, 14 Jul 2011 18:17:05 +0200 + +haproxy (1.4.13-1) unstable; urgency=low + + * New maintainer upload (Closes: #615246) + * New upstream release + * Standards-version goes 3.9.1 (no change) + * Added patch bashism (Closes: #581109) + * Added a README.source file. + + -- Christo Buschek Thu, 11 Mar 2011 12:41:59 +0000 + +haproxy (1.4.8-1) unstable; urgency=low + + * New upstream release. + + -- Arnaud Cornet Fri, 18 Jun 2010 00:42:53 +0100 + +haproxy (1.4.4-1) unstable; urgency=low + + * New upstream release + * Add splice and tproxy support + * Add regparm optimization on i386 + * Switch to dpkg-source 3.0 (quilt) format + + -- Arnaud Cornet Thu, 15 Apr 2010 20:00:34 +0100 + +haproxy (1.4.2-1) unstable; urgency=low + + * New upstream release + * Remove debian/patches/haproxy.1-hyphen.patch gone upstream + * Tighten quilt build dep (Closes: #567087) + * standards-version goes 3.8.4 (no change) + * Add $remote_fs to init.d script required start and stop + + -- Arnaud Cornet Sat, 27 Mar 2010 15:19:48 +0000 + +haproxy (1.3.22-1) unstable; urgency=low + + * New upstream bugfix release + + -- Arnaud Cornet Mon, 19 Oct 2009 22:31:45 +0100 + +haproxy (1.3.21-1) unstable; urgency=low + + [ Michael Shuler ] + * New Upstream Version (Closes: #538992) + * Added override for example shell scripts in docs (Closes: #530096) + * Added upstream changelog to docs + * Added debian/watch + * Updated debian/copyright format + * Added haproxy.1-hyphen.patch, to fix hyphen in man page + * Upgrade 
Standards-Version to 3.8.3 (no change needed) + * Upgrade debian/compat to 7 (no change needed) + + [ Arnaud Cornet ] + * New upstream version. + * Merge Michael's work, few changelog fixes + * Add debian/README.source to point to quilt doc + * Depend on debhelper >= 7.0.50~ and use overrides in debian/rules + + -- Arnaud Cornet Sun, 18 Oct 2009 14:01:29 +0200 + +haproxy (1.3.18-1) unstable; urgency=low + + * New Upstream Version (Closes: #534583). + * Add contrib directory in docs + + -- Arnaud Cornet Fri, 26 Jun 2009 00:11:01 +0200 + +haproxy (1.3.15.7-2) unstable; urgency=low + + * Fix build without debian/patches directory (Closes: #515682) using + /usr/share/quilt/quilt.make. + + -- Arnaud Cornet Tue, 17 Feb 2009 08:55:12 +0100 + +haproxy (1.3.15.7-1) unstable; urgency=low + + * New Upstream Version. + * Remove upstream patches: + -use_backend-consider-unless.patch + -segfault-url_param+check_post.patch + -server-timeout.patch + -closed-fd-remove.patch + -connection-slot-during-retry.patch + -srv_dynamic_maxconn.patch + -do-not-pause-backends-on-reload.patch + -acl-in-default.patch + -cookie-capture-check.patch + -dead-servers-queue.patch + + -- Arnaud Cornet Mon, 16 Feb 2009 11:20:21 +0100 + +haproxy (1.3.15.2-2~lenny1) testing-proposed-updates; urgency=low + + * Rebuild for lenny to circumvent pcre3 shlibs bump. + + -- Arnaud Cornet Wed, 14 Jan 2009 11:28:36 +0100 + +haproxy (1.3.15.2-2) unstable; urgency=low + + * Add stable branch bug fixes from upstream (Closes: #510185). 
+ - use_backend-consider-unless.patch: consider "unless" in use_backend + - segfault-url_param+check_post.patch: fix segfault with url_param + + check_post + - server-timeout.patch: consider server timeout in all circumstances + - closed-fd-remove.patch: drop info about closed file descriptors + - connection-slot-during-retry.patch: do not release the connection slot + during a retry + - srv_dynamic_maxconn.patch: dynamic connection throttling api fix + - do-not-pause-backends-on-reload.patch: make reload reliable + - acl-in-default.patch: allow acl-related keywords in defaults sections + - cookie-capture-check.patch: cookie capture is declared in the frontend + but checked on the backend + - dead-servers-queue.patch: make dead servers not suck pending connections + * Add quilt build-dependency. Use quilt in debian/rules to apply + patches. + + -- Arnaud Cornet Wed, 31 Dec 2008 08:50:21 +0100 + +haproxy (1.3.15.2-1) unstable; urgency=low + + * New Upstream Version (Closes: #497186). + + -- Arnaud Cornet Sat, 30 Aug 2008 18:06:31 +0200 + +haproxy (1.3.15.1-1) unstable; urgency=low + + * New Upstream Version + * Upgrade standards version to 3.8.0 (no change needed). + * Build with TARGET=linux26 on linux, TARGET=generic on other systems. + + -- Arnaud Cornet Fri, 20 Jun 2008 00:38:50 +0200 + +haproxy (1.3.14.5-1) unstable; urgency=low + + * New Upstream Version (Closes: #484221) + * Use debhelper 7, drop CDBS. + + -- Arnaud Cornet Wed, 04 Jun 2008 19:21:56 +0200 + +haproxy (1.3.14.3-1) unstable; urgency=low + + * New Upstream Version + * Add status argument support to init-script to conform to LSB. + * Cleanup pidfile after stop in init script. Init script return code fixups. + + -- Arnaud Cornet Sun, 09 Mar 2008 21:30:29 +0100 + +haproxy (1.3.14.2-3) unstable; urgency=low + + * Add init script support for nbproc > 1 in configuration. That is, + multiple haproxy processes. + * Use 'option redispatch' instead of redispatch in debian default + config. 
+ + -- Arnaud Cornet Sun, 03 Feb 2008 18:22:28 +0100 + +haproxy (1.3.14.2-2) unstable; urgency=low + + * Fix init script's reload function to use -sf instead of -st (to wait for + active session to finish cleanly). Also support dash. Thanks to + Jean-Baptiste Quenot for noticing. + + -- Arnaud Cornet Thu, 24 Jan 2008 23:47:26 +0100 + +haproxy (1.3.14.2-1) unstable; urgency=low + + * New Upstream Version + * Simplify DEB_MAKE_INVOKE, as upstream now supports us overriding + CFLAGS. + * Move haproxy to usr/sbin. + + -- Arnaud Cornet Mon, 21 Jan 2008 22:42:51 +0100 + +haproxy (1.3.14.1-1) unstable; urgency=low + + * New upstream release. + * Drop dfsg list and hash code rewrite (merged upstream). + * Add a HAPROXY variable in init script. + * Drop makefile patch, fix debian/rules accordingly. Drop build-dependency + on quilt. + * Manpage now upstream. Ship upstream's and drop ours. + + -- Arnaud Cornet Tue, 01 Jan 2008 22:50:09 +0100 + +haproxy (1.3.12.dfsg2-1) unstable; urgency=low + + * New upstream bugfix release. + * Use new Homepage tag. + * Bump standards-version (no change needed). + * Add build-depend on quilt and add patch to allow proper CFLAGS passing to + make. + + -- Arnaud Cornet Tue, 25 Dec 2007 21:52:59 +0100 + +haproxy (1.3.12.dfsg-1) unstable; urgency=low + + * Initial release (Closes: #416397). + * The DFSG removes files with GPL-incompatible license and adds a + re-implementation by me. 
+ + -- Arnaud Cornet Fri, 17 Aug 2007 09:33:41 +0200 diff --git a/debian/clean b/debian/clean new file mode 100644 index 0000000..9fc14bf --- /dev/null +++ b/debian/clean @@ -0,0 +1,2 @@ +doc/configuration.html +doc/intro.html diff --git a/debian/compat b/debian/compat new file mode 100644 index 0000000..ec63514 --- /dev/null +++ b/debian/compat @@ -0,0 +1 @@ +9 diff --git a/debian/control b/debian/control new file mode 100644 index 0000000..0690774 --- /dev/null +++ b/debian/control @@ -0,0 +1,69 @@ +Source: haproxy +Section: net +Priority: optional +Maintainer: Debian HAProxy Maintainers +Uploaders: Apollon Oikonomopoulos , + Prach Pongpanich , + Vincent Bernat +Standards-Version: 3.9.6 +Build-Depends: debhelper (>= 9), + libpcre3-dev, + libssl-dev, + liblua5.3-dev, + dh-systemd (>= 1.5), + python-sphinx (>= 1.0.7+dfsg) +Build-Depends-Indep: python, python-mako +Homepage: http://haproxy.1wt.eu/ +Vcs-Git: git://anonscm.debian.org/pkg-haproxy/haproxy.git +Vcs-Browser: http://anonscm.debian.org/gitweb/?p=pkg-haproxy/haproxy.git + +Package: haproxy +Architecture: any +Depends: ${shlibs:Depends}, ${misc:Depends}, adduser +Suggests: vim-haproxy, haproxy-doc +Description: fast and reliable load balancing reverse proxy + HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high + availability environments. It features connection persistence through HTTP + cookies, load balancing, header addition, modification, deletion both ways. It + has request blocking capabilities and provides interface to display server + status. + +Package: haproxy-dbg +Section: debug +Priority: extra +Architecture: any +Depends: ${misc:Depends}, haproxy (= ${binary:Version}) +Description: fast and reliable load balancing reverse proxy (debug symbols) + HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high + availability environments. It features connection persistence through HTTP + cookies, load balancing, header addition, modification, deletion both ways. 
It + has request blocking capabilities and provides interface to display server + status. + . + This package contains the debugging symbols for haproxy. + +Package: haproxy-doc +Section: doc +Priority: extra +Architecture: all +Depends: ${misc:Depends}, libjs-bootstrap (<< 4), libjs-jquery, + ${sphinxdoc:Depends} +Description: fast and reliable load balancing reverse proxy (HTML documentation) + HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high + availability environments. It features connection persistence through HTTP + cookies, load balancing, header addition, modification, deletion both ways. It + has request blocking capabilities and provides interface to display server + status. + . + This package contains the HTML documentation for haproxy. + +Package: vim-haproxy +Architecture: all +Depends: ${misc:Depends} +Recommends: vim-addon-manager +Description: syntax highlighting for HAProxy configuration files + The vim-haproxy package provides filetype detection and syntax highlighting + for HAProxy configuration files. + . + As per the Debian vim policy, installed addons are not activated + automatically, but the "vim-addon-manager" tool can be used for this purpose. diff --git a/debian/copyright b/debian/copyright new file mode 100644 index 0000000..7267091 --- /dev/null +++ b/debian/copyright @@ -0,0 +1,261 @@ +Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ +Upstream-Name: haproxy +Upstream-Contact: Willy Tarreau +Source: http://haproxy.1wt.eu/ + +Files: * +Copyright: Copyright 2000-2015 Willy Tarreau . 
+License: GPL-2+ + +Files: ebtree/* + include/* + contrib/halog/fgets2.c +Copyright: Copyright 2000-2013 Willy Tarreau - w@1wt.eu +License: LGPL-2.1 + +Files: include/proto/auth.h + include/types/checks.h + include/types/auth.h + src/auth.c +Copyright: Copyright 2008-2010 Krzysztof Piotr Oledzki +License: GPL-2+ + +Files: include/import/lru.h + src/lru.c +Copyright: Copyright (C) 2015 Willy Tarreau +License: Expat + +Files: include/import/xxhash.h + src/xxhash.c +Copyright: Copyright (C) 2012-2014, Yann Collet. +License: BSD-2-clause + +Files: include/proto/shctx.h + src/shctx.c +Copyright: Copyright (C) 2011-2012 EXCELIANCE +License: GPL-2+ + +Files: include/proto/compression.h + include/types/compression.h +Copyright: Copyright 2012 (C) Exceliance, David Du Colombier + William Lallemand +License: LGPL-2.1 + +Files: include/proto/peers.h + include/proto/ssl_sock.h + include/types/peers.h + include/types/ssl_sock.h +Copyright: Copyright (C) 2009-2012 EXCELIANCE, Emeric Brun +License: LGPL-2.1 + +Files: include/types/dns.h +Copyright: Copyright (C) 2014 Baptiste Assmann +License: LGPL-2.1 + +Files: src/dns.c +Copyright: Copyright (C) 2014 Baptiste Assmann +License: GPL-2+ + +Files: include/types/mailers.h + src/mailers.c +Copyright: Copyright 2015 Horms Solutions Ltd., Simon Horman + Copyright 2010 EXCELIANCE, Emeric Brun +License: LGPL-2.1 + +Files: include/proto/sample.h + include/proto/stick_table.h + include/types/sample.h + include/types/stick_table.h +Copyright: Copyright (C) 2009-2012 EXCELIANCE, Emeric Brun + Copyright (C) 2010-2013 Willy Tarreau +License: LGPL-2.1 + +Files: include/types/counters.h +Copyright: Copyright 2008-2009 Krzysztof Piotr Oledzki + Copyright 2011 Willy Tarreau +License: LGPL-2.1 + +Files: include/common/base64.h + include/common/uri_auth.h + include/proto/signal.h + include/types/signal.h +Copyright: Copyright 2000-2013 Willy Tarreau +License: GPL-2+ + +Files: include/common/rbtree.h +Copyright: (C) 1999 Andrea Arcangeli +License: 
GPL-2+ + +Files: src/base64.c + src/checks.c + src/dumpstats.c + src/server.c +Copyright: Copyright 2000-2012 Willy Tarreau + Copyright 2007-2010 Krzysztof Piotr Oledzki +License: GPL-2+ + +Files: src/compression.c +Copyright: Copyright 2012 (C) Exceliance, David Du Colombier + William Lallemand +License: GPL-2+ + +Files: src/haproxy-systemd-wrapper.c +Copyright: Copyright 2013 Marc-Antoine Perennou +License: GPL-2+ + +Files: src/rbtree.c +Copyright: (C) 1999 Andrea Arcangeli + (C) 2002 David Woodhouse +License: GPL-2+ + +Files: src/sample.c + src/stick_table.c +Copyright: Copyright 2009-2010 EXCELIANCE, Emeric Brun + Copyright (C) 2010-2012 Willy Tarreau +License: GPL-2+ + +Files: src/peers.c + src/ssl_sock.c +Copyright: Copyright (C) 2010-2012 EXCELIANCE, Emeric Brun +License: GPL-2+ + +Files: contrib/netsnmp-perl/haproxy.pl + contrib/base64/base64rev-gen.c +Copyright: Copyright 2007-2010 Krzysztof Piotr Oledzki +License: GPL-2+ + +Files: examples/stats_haproxy.sh +Copyright: Copyright 2007 Julien Antony and Matthieu Huguet +License: GPL-2+ + +Files: examples/check +Copyright: 2006-2007 (C) Fabrice Dulaunoy +License: GPL-2+ + +Files: tests/test_pools.c +Copyright: Copyright 2007 Aleksandar Lazic +License: GPL-2+ + +Files: debian/* +Copyright: Copyright (C) 2007-2011, Arnaud Cornet + Copyright (C) 2011, Christo Buschek + Copyright (C) 2013, Prach Pongpanich + Copyright (C) 2013-2014, Apollon Oikonomopoulos + Copyright (C) 2013, Vincent Bernat +License: GPL-2 + +Files: debian/dconv/* +Copyright: Copyright (C) 2012 Cyril Bonté +License: Apache-2.0 + +Files: debian/dconv/js/typeahead.bundle.js +Copyright: Copyright 2013-2015 Twitter, Inc. and other contributors +License: Expat + +License: GPL-2+ + This program is free software; you can redistribute it + and/or modify it under the terms of the GNU General Public + License as published by the Free Software Foundation; either + version 2 of the License, or (at your option) any later + version. + . 
+ This program is distributed in the hope that it will be + useful, but WITHOUT ANY WARRANTY; without even the implied + warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR + PURPOSE. See the GNU General Public License for more + details. + . + You should have received a copy of the GNU General Public + License along with this package; if not, write to the Free + Software Foundation, Inc., 51 Franklin St, Fifth Floor, + Boston, MA 02110-1301 USA + . + On Debian systems, the full text of the GNU General Public + License version 2 can be found in the file + `/usr/share/common-licenses/GPL-2'. + +License: LGPL-2.1 + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + . + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + . + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + . + On Debian systems, the complete text of the GNU Lesser General Public License, + version 2.1, can be found in /usr/share/common-licenses/LGPL-2.1. + +License: GPL-2 + This program is free software; you can redistribute it and/or modify + it under the terms of the GNU General Public License version 2 as + published by the Free Software Foundation. + . + On Debian systems, the complete text of the GNU General Public License, version + 2, can be found in /usr/share/common-licenses/GPL-2. 
+ +License: Apache-2.0 + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + . + http://www.apache.org/licenses/LICENSE-2.0 + . + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + . + On Debian systems, the full text of the Apache License version 2.0 can be + found in the file `/usr/share/common-licenses/Apache-2.0'. + +License: Expat + Permission is hereby granted, free of charge, to any person obtaining + a copy of this software and associated documentation files (the + "Software"), to deal in the Software without restriction, including + without limitation the rights to use, copy, modify, merge, publish, + distribute, sublicense, and/or sell copies of the Software, and to + permit persons to whom the Software is furnished to do so, subject to + the following conditions: + . + The above copyright notice and this permission notice shall be + included in all copies or substantial portions of the Software. + . + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND + NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE + LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION + OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION + WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +License: BSD-2-clause + Redistribution and use in source and binary forms, with or without + modification, are permitted provided that the following conditions are + met: + . 
+ * Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above + copyright notice, this list of conditions and the following disclaimer + in the documentation and/or other materials provided with the + distribution. + . + THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS + "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT + LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR + A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT + OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, + DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY + THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT + (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/debian/dconv/LICENSE b/debian/dconv/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/debian/dconv/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/debian/dconv/NOTICE b/debian/dconv/NOTICE new file mode 100644 index 0000000..c9575a7 --- /dev/null +++ b/debian/dconv/NOTICE @@ -0,0 +1,13 @@ +Copyright 2012 Cyril Bonté + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/debian/dconv/README.md b/debian/dconv/README.md new file mode 100644 index 0000000..4ca89b2 --- /dev/null +++ b/debian/dconv/README.md @@ -0,0 +1,21 @@ +# HAProxy Documentation Converter + +Made to convert the HAProxy documentation into HTML. + +More than HTML, the main goal is to provide easy navigation. + +## Documentations + +A bot periodically fetches last commits for HAProxy 1.4 and 1.5 to produce up-to-date documentations. 
+ +Converted documentations are then stored online : +- HAProxy 1.4 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.4.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.4.html) +- HAProxy 1.5 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.5.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.5.html) +- HAProxy 1.6 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.6.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.6.html) + + +## Contribute + +The project now lives by itself, as it is sufficiently useable. But I'm sure we can do even better. +Feel free to report feature requests or to provide patches ! + diff --git a/debian/dconv/css/check.png b/debian/dconv/css/check.png new file mode 100644 index 0000000000000000000000000000000000000000..a7fab326b02414b6d726c7cbbb770c9f0fe0f5cf GIT binary patch literal 531 zcmV+u0_^>XP)GT6bJC{?o#W_BDOO-Bbx23?74#-f}%r72&r|@{(wkcipax2P?ruJBKvon?q+q$ z&OxQuh)k_kNN3tv-8Bk2b()-=-}}Ak(iUcdcI$h4@IGH25BTqL=FtgfV7&KQV@_=c zpMH4UY1K?yO)E!?gIe8xUVXXcf>RIromS1Xi&gb;(e<#hTtieBO0uyuIz7^T;o0!HKk?xGKF8RVvkQ3(6ia+qa^aU(xE@BztLt-N z3IIl@Mh+3l$GVywD)w97N>3JN0AOIe*D

yVJ73U@W;FDt?toQ0DcGw;=!|F1|S1 zBS}saa{2c3jx^@o5|f}yLeL&Ft6#RP0^(T0^~zZGYfOSNGh^?<#)XOVPluYA;x)Zl zL%Y$2VC_3TfBk|~G6mBx5GR3;ZUf8Ao14Ee03-llcydr>$${RYp`&d#m?a_>Kn!@E zkE*{4AxN3M^?n`xRq*p!Dia5~rXip20I@{bt)N<21qo7SZ|VFZ#DQVt zAV{EA`2-TA%-vb|E8b850Efp8sFFE|!iY%7%>3PjXfwQN_FwH%_g&e$bKCG=@&ik@ V*E|4j%;^9C002ovPDHLkV1nRm_wE1y literal 0 HcmV?d00001 diff --git a/debian/dconv/css/cross.png b/debian/dconv/css/cross.png new file mode 100644 index 0000000000000000000000000000000000000000..24f50645f915ad9465c0e3f4465909d7f2f24873 GIT binary patch literal 640 zcmV-`0)PF9P)~nAV<8xetX&>@3I#YpFrrhsRKIYMGXO9zU%5u>1lcTSz7O4OLN^-FqBW>Vf^i=4 zz=sp$A>u{+E?2ZB%JfaWlWg-B_4K)GluB4O%XyH8T5dr8s>6_k<@<2*0mNbqQK@Jn z$?KbXF93jT^+}J$T`g6@b2)DJ?go?wv<9?-h+~LKbzMCDLHzX!&f7P_5P4LqX^;Zu z=78B*Kq-iFMQchs#9aUY&_n}_fkR1v2#^wx63pUUr!=treFtFe#;p(xk(JE>l7M0n zDDDTOL?)kyWm{2Y=w!!<+ZiO+Z-$Js$j*6if&i`Z5=6DCH4)t2KJFFwVXa)j+R_rz zmDRxmUuS088zdv6A!97EvpG1vhgQ6vwob`2L|ziddga?Ja)mv}77DO?oM_?jKnMW# zhVOpv*nrV!#bva9RJ9@L6+V9y08l li { + white-space: nowrap; +} +/* TEMPORARILY HACKS WHILE PRE TAGS ARE USED +-------------------------------------------------- */ + +h5, +.unpre, +.example-desc, +.dropdown-menu { + font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; + white-space: normal; +} diff --git a/debian/dconv/haproxy-dconv.py b/debian/dconv/haproxy-dconv.py new file mode 100755 index 0000000..fe2b96d --- /dev/null +++ b/debian/dconv/haproxy-dconv.py @@ -0,0 +1,534 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +# Copyright 2012 Cyril Bonté +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +''' +TODO : ability to split chapters into several files +TODO : manage keyword locality (server/proxy/global ; ex : maxconn) +TODO : Remove global variables where possible +''' +import os +import subprocess +import sys +import cgi +import re +import time +import datetime + +from optparse import OptionParser + +from mako.template import Template +from mako.lookup import TemplateLookup +from mako.exceptions import TopLevelLookupException + +from parser import PContext +from parser import remove_indent +from parser import * + +from urllib import quote + +VERSION = "" +HAPROXY_GIT_VERSION = False + +def main(): + global VERSION, HAPROXY_GIT_VERSION + + usage="Usage: %prog --infile --outfile " + + optparser = OptionParser(description='Generate HTML Document from HAProxy configuation.txt', + version=VERSION, + usage=usage) + optparser.add_option('--infile', '-i', help='Input file mostly the configuration.txt') + optparser.add_option('--outfile','-o', help='Output file') + optparser.add_option('--base','-b', default = '', help='Base directory for relative links') + (option, args) = optparser.parse_args() + + if not (option.infile and option.outfile) or len(args) > 0: + optparser.print_help() + exit(1) + + option.infile = os.path.abspath(option.infile) + option.outfile = os.path.abspath(option.outfile) + + os.chdir(os.path.dirname(__file__)) + + VERSION = get_git_version() + if not VERSION: + sys.exit(1) + + HAPROXY_GIT_VERSION = get_haproxy_git_version(os.path.dirname(option.infile)) + + convert(option.infile, option.outfile, option.base) + + +# Temporarily determine the version from git to follow which commit generated +# the documentation +def get_git_version(): + if not os.path.isdir(".git"): + print >> sys.stderr, "This does not appear to be a Git repository." 
+ return + try: + p = subprocess.Popen(["git", "describe", "--tags", "--match", "v*"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError: + print >> sys.stderr, "Unable to run git" + return + version = p.communicate()[0] + if p.returncode != 0: + print >> sys.stderr, "Unable to run git" + return + + if len(version) < 2: + return + + version = version[1:].strip() + version = re.sub(r'-g.*', '', version) + return version + +def get_haproxy_git_version(path): + try: + p = subprocess.Popen(["git", "describe", "--tags", "--match", "v*"], cwd=path, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + except EnvironmentError: + return False + version = p.communicate()[0] + + if p.returncode != 0: + return False + + if len(version) < 2: + return False + + version = version[1:].strip() + version = re.sub(r'-g.*', '', version) + return version + +def getTitleDetails(string): + array = string.split(".") + + title = array.pop().strip() + chapter = ".".join(array) + level = max(1, len(array)) + if array: + toplevel = array[0] + else: + toplevel = False + + return { + "title" : title, + "chapter" : chapter, + "level" : level, + "toplevel": toplevel + } + +# Parse the whole document to insert links on keywords +def createLinks(): + global document, keywords, keywordsCount, keyword_conflicts, chapters + + print >> sys.stderr, "Generating keywords links..." + + delimiters = [ + dict(start='"', end='"', multi=True ), + dict(start='- ' , end='\n' , multi=False), + ] + + for keyword in keywords: + keywordsCount[keyword] = 0 + for delimiter in delimiters: + keywordsCount[keyword] += document.count(delimiter['start'] + keyword + delimiter['end']) + if (keyword in keyword_conflicts) and (not keywordsCount[keyword]): + # The keyword is never used, we can remove it from the conflicts list + del keyword_conflicts[keyword] + + if keyword in keyword_conflicts: + chapter_list = "" + for chapter in keyword_conflicts[keyword]: + chapter_list += '

  • %s
  • ' % (quote("%s (%s)" % (keyword, chapters[chapter]['title'])), chapters[chapter]['title']) + for delimiter in delimiters: + if delimiter['multi']: + document = document.replace(delimiter['start'] + keyword + delimiter['end'], + delimiter['start'] + '' + + '' + + keyword + + '' + + '' + + '' + + '' + delimiter['end']) + else: + document = document.replace(delimiter['start'] + keyword + delimiter['end'], delimiter['start'] + '' + keyword + '' + delimiter['end']) + else: + for delimiter in delimiters: + document = document.replace(delimiter['start'] + keyword + delimiter['end'], delimiter['start'] + '' + keyword + '' + delimiter['end']) + if keyword.startswith("option "): + shortKeyword = keyword[len("option "):] + keywordsCount[shortKeyword] = 0 + for delimiter in delimiters: + keywordsCount[keyword] += document.count(delimiter['start'] + shortKeyword + delimiter['end']) + if (shortKeyword in keyword_conflicts) and (not keywordsCount[shortKeyword]): + # The keyword is never used, we can remove it from the conflicts list + del keyword_conflicts[shortKeyword] + for delimiter in delimiters: + document = document.replace(delimiter['start'] + shortKeyword + delimiter['start'], delimiter['start'] + '' + shortKeyword + '' + delimiter['end']) + +def documentAppend(text, retline = True): + global document + document += text + if retline: + document += "\n" + +def init_parsers(pctxt): + return [ + underline.Parser(pctxt), + arguments.Parser(pctxt), + seealso.Parser(pctxt), + example.Parser(pctxt), + table.Parser(pctxt), + underline.Parser(pctxt), + keyword.Parser(pctxt), + ] + +# The parser itself +def convert(infile, outfile, base=''): + global document, keywords, keywordsCount, chapters, keyword_conflicts + + if len(base) > 0 and base[:-1] != '/': + base += '/' + + hasSummary = False + + data = [] + fd = file(infile,"r") + for line in fd: + line.replace("\t", " " * 8) + line = line.rstrip() + data.append(line) + fd.close() + + pctxt = PContext( + TemplateLookup( + 
directories=[ + 'templates' + ] + ) + ) + + parsers = init_parsers(pctxt) + + pctxt.context = { + 'headers': {}, + 'document': "", + 'base': base, + } + + sections = [] + currentSection = { + "details": getTitleDetails(""), + "content": "", + } + + chapters = {} + + keywords = {} + keywordsCount = {} + + specialSections = { + "default": { + "hasKeywords": True, + }, + "4.1": { + "hasKeywords": True, + }, + } + + pctxt.keywords = keywords + pctxt.keywordsCount = keywordsCount + pctxt.chapters = chapters + + print >> sys.stderr, "Importing %s..." % infile + + nblines = len(data) + i = j = 0 + while i < nblines: + line = data[i].rstrip() + if i < nblines - 1: + next = data[i + 1].rstrip() + else: + next = "" + if (line == "Summary" or re.match("^[0-9].*", line)) and (len(next) > 0) and (next[0] == '-') \ + and ("-" * len(line)).startswith(next): # Fuzzy underline length detection + sections.append(currentSection) + currentSection = { + "details": getTitleDetails(line), + "content": "", + } + j = 0 + i += 1 # Skip underline + while not data[i + 1].rstrip(): + i += 1 # Skip empty lines + + else: + if len(line) > 80: + print >> sys.stderr, "Line `%i' exceeds 80 columns" % (i + 1) + + currentSection["content"] = currentSection["content"] + line + "\n" + j += 1 + if currentSection["details"]["title"] == "Summary" and line != "": + hasSummary = True + # Learn chapters from the summary + details = getTitleDetails(line) + if details["chapter"]: + chapters[details["chapter"]] = details + i += 1 + sections.append(currentSection) + + chapterIndexes = sorted(chapters.keys()) + + document = "" + + # Complete the summary + for section in sections: + details = section["details"] + title = details["title"] + if title: + fulltitle = title + if details["chapter"]: + #documentAppend("" % details["chapter"]) + fulltitle = details["chapter"] + ". 
" + title + if not details["chapter"] in chapters: + print >> sys.stderr, "Adding '%s' to the summary" % details["title"] + chapters[details["chapter"]] = details + chapterIndexes = sorted(chapters.keys()) + + for section in sections: + details = section["details"] + pctxt.details = details + level = details["level"] + title = details["title"] + content = section["content"].rstrip() + + print >> sys.stderr, "Parsing chapter %s..." % title + + if (title == "Summary") or (title and not hasSummary): + summaryTemplate = pctxt.templates.get_template('summary.html') + documentAppend(summaryTemplate.render( + pctxt = pctxt, + chapters = chapters, + chapterIndexes = chapterIndexes, + )) + if title and not hasSummary: + hasSummary = True + else: + continue + + if title: + documentAppend('' % (details["chapter"], details["chapter"])) + if level == 1: + documentAppend("
    ", False) + documentAppend('%s. %s' % (level, details["chapter"], details["chapter"], details["chapter"], details["chapter"], cgi.escape(title, True), level)) + if level == 1: + documentAppend("
    ", False) + + if content: + if False and title: + # Display a navigation bar + documentAppend('
      ') + documentAppend('
    • Top
    • ', False) + index = chapterIndexes.index(details["chapter"]) + if index > 0: + documentAppend('' % chapterIndexes[index - 1], False) + if index < len(chapterIndexes) - 1: + documentAppend('' % chapterIndexes[index + 1], False) + documentAppend('
    ', False) + content = cgi.escape(content, True) + content = re.sub(r'section ([0-9]+(.[0-9]+)*)', r'section \1', content) + + pctxt.set_content(content) + + if not title: + lines = pctxt.get_lines() + pctxt.context['headers'] = { + 'title': '', + 'subtitle': '', + 'version': '', + 'author': '', + 'date': '' + } + if re.match("^-+$", pctxt.get_line().strip()): + # Try to analyze the header of the file, assuming it follows + # those rules : + # - it begins with a "separator line" (several '-' chars) + # - then the document title + # - an optional subtitle + # - a new separator line + # - the version + # - the author + # - the date + pctxt.next() + pctxt.context['headers']['title'] = pctxt.get_line().strip() + pctxt.next() + subtitle = "" + while not re.match("^-+$", pctxt.get_line().strip()): + subtitle += " " + pctxt.get_line().strip() + pctxt.next() + pctxt.context['headers']['subtitle'] += subtitle.strip() + if not pctxt.context['headers']['subtitle']: + # No subtitle, try to guess one from the title if it + # starts with the word "HAProxy" + if pctxt.context['headers']['title'].startswith('HAProxy '): + pctxt.context['headers']['subtitle'] = pctxt.context['headers']['title'][8:] + pctxt.context['headers']['title'] = 'HAProxy' + pctxt.next() + pctxt.context['headers']['version'] = pctxt.get_line().strip() + pctxt.next() + pctxt.context['headers']['author'] = pctxt.get_line().strip() + pctxt.next() + pctxt.context['headers']['date'] = pctxt.get_line().strip() + pctxt.next() + if HAPROXY_GIT_VERSION: + pctxt.context['headers']['version'] = 'version ' + HAPROXY_GIT_VERSION + + # Skip header lines + pctxt.eat_lines() + pctxt.eat_empty_lines() + + documentAppend('
    ', False) + + delay = [] + while pctxt.has_more_lines(): + try: + specialSection = specialSections[details["chapter"]] + except: + specialSection = specialSections["default"] + + line = pctxt.get_line() + if i < nblines - 1: + nextline = pctxt.get_line(1) + else: + nextline = "" + + oldline = line + pctxt.stop = False + for parser in parsers: + line = parser.parse(line) + if pctxt.stop: + break + if oldline == line: + # nothing has changed, + # delays the rendering + if delay or line != "": + delay.append(line) + pctxt.next() + elif pctxt.stop: + while delay and delay[-1].strip() == "": + del delay[-1] + if delay: + remove_indent(delay) + documentAppend('
    %s\n
    ' % "\n".join(delay), False) + delay = [] + documentAppend(line, False) + else: + while delay and delay[-1].strip() == "": + del delay[-1] + if delay: + remove_indent(delay) + documentAppend('
    %s\n
    ' % "\n".join(delay), False) + delay = [] + documentAppend(line, True) + pctxt.next() + + while delay and delay[-1].strip() == "": + del delay[-1] + if delay: + remove_indent(delay) + documentAppend('
    %s\n
    ' % "\n".join(delay), False) + delay = [] + documentAppend('
    ') + + if not hasSummary: + summaryTemplate = pctxt.templates.get_template('summary.html') + print chapters + document = summaryTemplate.render( + pctxt = pctxt, + chapters = chapters, + chapterIndexes = chapterIndexes, + ) + document + + + # Log warnings for keywords defined in several chapters + keyword_conflicts = {} + for keyword in keywords: + keyword_chapters = list(keywords[keyword]) + keyword_chapters.sort() + if len(keyword_chapters) > 1: + print >> sys.stderr, 'Multi section keyword : "%s" in chapters %s' % (keyword, list(keyword_chapters)) + keyword_conflicts[keyword] = keyword_chapters + + keywords = list(keywords) + keywords.sort() + + createLinks() + + # Add the keywords conflicts to the keywords list to make them available in the search form + # And remove the original keyword which is now useless + for keyword in keyword_conflicts: + sections = keyword_conflicts[keyword] + offset = keywords.index(keyword) + for section in sections: + keywords.insert(offset, "%s (%s)" % (keyword, chapters[section]['title'])) + offset += 1 + keywords.remove(keyword) + + print >> sys.stderr, "Exporting to %s..." 
% outfile + + template = pctxt.templates.get_template('template.html') + try: + footerTemplate = pctxt.templates.get_template('footer.html') + footer = footerTemplate.render( + pctxt = pctxt, + headers = pctxt.context['headers'], + document = document, + chapters = chapters, + chapterIndexes = chapterIndexes, + keywords = keywords, + keywordsCount = keywordsCount, + keyword_conflicts = keyword_conflicts, + version = VERSION, + date = datetime.datetime.now().strftime("%Y/%m/%d"), + ) + except TopLevelLookupException: + footer = "" + + fd = open(outfile,'w') + + print >> fd, template.render( + pctxt = pctxt, + headers = pctxt.context['headers'], + base = base, + document = document, + chapters = chapters, + chapterIndexes = chapterIndexes, + keywords = keywords, + keywordsCount = keywordsCount, + keyword_conflicts = keyword_conflicts, + version = VERSION, + date = datetime.datetime.now().strftime("%Y/%m/%d"), + footer = footer + ) + fd.close() + +if __name__ == '__main__': + main() diff --git a/debian/dconv/img/logo-med.png b/debian/dconv/img/logo-med.png new file mode 100644 index 0000000000000000000000000000000000000000..1be03b25ac43e1550586d17b3bbd3db4e3085e6d GIT binary patch literal 3522 zcmV;z4L$OSP)FVpOokd5s zCYbqu)oFL%);;V{QegM}3K)F)hluU&?l({8^25-~zBvBcGWGU(2Gzpu#npH3_Qcud zYhWn#_JX{xx(gwuoQH=_)+TaphW`Hk(a_IJPWq?htzd6rZlVk|AXJ6gja-9!nMdBoDX4ai&%}grO zl~4b(_J1_CXsKus9c+OT|GffgECVfCFd=mwjfv=#pgBs*H%H@ed{|Rb&!EkFaUqm` zW3N+4i$J(3si;{fkqHi(401>s3Za*aC&<* zCmb0YPgF2Y%f7T|3%TE`<>3XF+3lsU^r}a#vPoM3Ny5XOiatY zTJK&dW?P18fqrN-#^d4EgXgNz$KiO~AATH3Ih#;|3xayNT&qYW7Jl2j8Wp(|~Z!hkRg-8}_x%&rskG%DojYyesv3r31Mn)ev|>Wc~KXkN7$7 z_lFHVuUeq$(5qgrmwUZl6{0R}@#q6>9Fk6hA<$Sgi-sC;45H`i^K-RS>Q!kXEN83% zZ8$t1BM`(IqTKJ2YT;}`>%G#-RvQP{N;DCaSjyXAL};T>G8(Lbn0eKLUa9&lakooo zRgW?%m!aBdXwaaAq+(+sp@UNC_D+)B6WcvkU508CMnfQ#s21kdH4L?Ksd}p=`R{H? 
zHdI~4SkvAZWar~a=qhi5()n<5?aFSLp$(>lGxZ?VpkAW#!JSfXkLFJreegi6k#hTu zA(i?K>1wMQaL9n8?zjXvs20AtYNPXGV@z|sPDVVRZ_ux*L9_&qGZjIX`qyG zqeMlRA~Xf>8@~+#3hV+*A+&UH5CJQ~XeyTAffFSrQ%o4V`F9?XfK-m-22tetj*s#0 zYS;Cn$atN&IcVV%J7VBCamxu>16)M#zbzU<69x@AoFd=<1UTIBeSAO?H=D7DV`hot zPdGK52z1RD>3&-@fQX2oKt`7gNOZmkLT}&=0`P&3Za0Z0f^J63IF8;94T|}s7|A8l z`O4QqgI1qd!@t_$~f=xb+1)?a%w<=Ab%bTh=7jTyXbyqp@5m5xiJ6=l-GR4P}9(J@{LjRx6y=A%*sWho<`O@7!Urrhi^sj&BF;^l!3=m z+2te-ehZD>i8EX+Gce{LBH)_fIeG1y03hVBsgEg<7H@tz`FpfTK=`a7wan(a>*P~8 zyVO|^122x&Y!Qh_Jv>g7=}95}9u0{|0jyk$1Xw@BY$JabE&A1}Xs5CxeDbi1mr101 z5iJjvzQTiS@v_b=ow4o6zP#)tSXdcF*0AZ=Z4W#2SO;Dl^Jt5YNE5qdXZGS7s%2u$ zy(6q>zn)W;EnSpZJ>!EK&M(je>O`m>$KWy+I5Xhsn_=FEx;X@+?rkD$8QMT)av#F}IO@MJ(6G}%jE%RPSN$X^&+2-`8Le7i2^G=Iq zo7#eq+C(>`c?+yFzg3nv4H{qLl*iWEw-1x27I2DLG;U!I7|wJ}!L^gLqno(PKV-(4 zb#K~n2h>u-(uVk?F1wW$qz#?CQ?S#b3HnmZSH4p835*HwQ*=1}00#_SgwK$Dtw{72+UPv0`UVH38VkC_6mOon>f$vEA_bDh-r6`3vv;{U1My z#UK0oR~$%4a*}!wHm(i4V8QU+^gNLwAr~h z*7O)?BOQJoQbDRLI!46N(b4NmsIPZOxEq;6^O4NU)ko>dZs8Jny|QQys#SJ3Hr^d- zDb>bCdriwkHAMqqP_1FpG=xv`9*B$=Rig;nY>N$ar-Oo*_QBZ5R0LO^7_h7p9-Ahc=Jqe#d3uyZ8 zRyIyIPEWtKcUhJU4Nm{AiY>MSImK?<)H8)>nz2S`K2o{?wDu$M2eYc7OtZ8%#TI2-OBddsjWs2)EyidgY}Xx! zGM7-;i*~R6Iy&3j)H&5aRxDhbs7e)KdtFA8P>mWZw~LfwmWEWx2ag$RJiy&4ZU2?) 
z)fG=t7YnMf@EU06n4z>?cifjUnP>1Bv?I#u&gy51V9^++fG0*spj{E#wyV1#qY3lS zyR@X8Xl|IIu?sDF)n2b$yS+C~@$(tl>38~g8H_sp^J7nDIdcL+dwt#6JbMM<4MhkN zCpu@eu9O_qsCJzul!C()mXCMu4!gO8l9R$hHtcAvgw!k9PgQI5jU69vdD;pi2<`Rh zb?4#p&MV-qxRVr3T>|FIwrlPY+GT;!jA{(`?(}eX9c+uLk#4mT&@MumaLaksFj~Vt zb_BMzw%QLDdS)u|M1p8%FW2GA*(=~Uf=Zn6CG5+#R@|{9v{fy2msLZ27VTZTy?eNR zlCarnz}{&Ic9CEUlG}K=%iX;{A)5MqL(lXzn#6Fo!zkZGD)br z2if+zM&=^W3fnqMXaM{Ee)rky?yjAjtXD{zylNuOR14UCsv4t(iLx`QAwCh&y7ui1 zr5A0{P-&akF8P73Xr!%Pr%NuQeSKePuiBgAQ`i%y{=FoWQ02-329KR!H+m|i4LpujDz3!}ABVf1*-|F|d=J{KlZNzy$sD|)QA+pAk!tDm|g3-sy6=XFJv$-scxS za+K^v`6zHL2`2)WrXYp!ncGVlnI~4{OAO6Sna2{V1~!hX$-}u)$fiY01E7}W67;b< zW~p(>R6}s1n(?O)+>?Nl(M0O=Z4H$=*=*m2?WbQXMD{P%MKc~-igM0{#tVO&+Zw;> w`_FgwX3p~veExN-U$`Uw`E9h{=KeSBKgh1kO$zxyNdN!<07*qoM6N<$f;h?6%>V!Z literal 0 HcmV?d00001 diff --git a/debian/dconv/js/typeahead.bundle.js b/debian/dconv/js/typeahead.bundle.js new file mode 100644 index 0000000..bb0c8ae --- /dev/null +++ b/debian/dconv/js/typeahead.bundle.js @@ -0,0 +1,2451 @@ +/*! + * typeahead.js 0.11.1 + * https://github.com/twitter/typeahead.js + * Copyright 2013-2015 Twitter, Inc. and other contributors; Licensed MIT + */ + +(function(root, factory) { + if (typeof define === "function" && define.amd) { + define("bloodhound", [ "jquery" ], function(a0) { + return root["Bloodhound"] = factory(a0); + }); + } else if (typeof exports === "object") { + module.exports = factory(require("jquery")); + } else { + root["Bloodhound"] = factory(jQuery); + } +})(this, function($) { + var _ = function() { + "use strict"; + return { + isMsie: function() { + return /(msie|trident)/i.test(navigator.userAgent) ? 
navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false; + }, + isBlankString: function(str) { + return !str || /^\s*$/.test(str); + }, + escapeRegExChars: function(str) { + return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); + }, + isString: function(obj) { + return typeof obj === "string"; + }, + isNumber: function(obj) { + return typeof obj === "number"; + }, + isArray: $.isArray, + isFunction: $.isFunction, + isObject: $.isPlainObject, + isUndefined: function(obj) { + return typeof obj === "undefined"; + }, + isElement: function(obj) { + return !!(obj && obj.nodeType === 1); + }, + isJQuery: function(obj) { + return obj instanceof $; + }, + toStr: function toStr(s) { + return _.isUndefined(s) || s === null ? "" : s + ""; + }, + bind: $.proxy, + each: function(collection, cb) { + $.each(collection, reverseArgs); + function reverseArgs(index, value) { + return cb(value, index); + } + }, + map: $.map, + filter: $.grep, + every: function(obj, test) { + var result = true; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (!(result = test.call(null, val, key, obj))) { + return false; + } + }); + return !!result; + }, + some: function(obj, test) { + var result = false; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (result = test.call(null, val, key, obj)) { + return false; + } + }); + return !!result; + }, + mixin: $.extend, + identity: function(x) { + return x; + }, + clone: function(obj) { + return $.extend(true, {}, obj); + }, + getIdGenerator: function() { + var counter = 0; + return function() { + return counter++; + }; + }, + templatify: function templatify(obj) { + return $.isFunction(obj) ? 
obj : template; + function template() { + return String(obj); + } + }, + defer: function(fn) { + setTimeout(fn, 0); + }, + debounce: function(func, wait, immediate) { + var timeout, result; + return function() { + var context = this, args = arguments, later, callNow; + later = function() { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + } + }; + callNow = immediate && !timeout; + clearTimeout(timeout); + timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + } + return result; + }; + }, + throttle: function(func, wait) { + var context, args, timeout, result, previous, later; + previous = 0; + later = function() { + previous = new Date(); + timeout = null; + result = func.apply(context, args); + }; + return function() { + var now = new Date(), remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0) { + clearTimeout(timeout); + timeout = null; + previous = now; + result = func.apply(context, args); + } else if (!timeout) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }, + stringify: function(val) { + return _.isString(val) ? val : JSON.stringify(val); + }, + noop: function() {} + }; + }(); + var VERSION = "0.11.1"; + var tokenizers = function() { + "use strict"; + return { + nonword: nonword, + whitespace: whitespace, + obj: { + nonword: getObjTokenizer(nonword), + whitespace: getObjTokenizer(whitespace) + } + }; + function whitespace(str) { + str = _.toStr(str); + return str ? str.split(/\s+/) : []; + } + function nonword(str) { + str = _.toStr(str); + return str ? str.split(/\W+/) : []; + } + function getObjTokenizer(tokenizer) { + return function setKey(keys) { + keys = _.isArray(keys) ? 
keys : [].slice.call(arguments, 0); + return function tokenize(o) { + var tokens = []; + _.each(keys, function(k) { + tokens = tokens.concat(tokenizer(_.toStr(o[k]))); + }); + return tokens; + }; + }; + } + }(); + var LruCache = function() { + "use strict"; + function LruCache(maxSize) { + this.maxSize = _.isNumber(maxSize) ? maxSize : 100; + this.reset(); + if (this.maxSize <= 0) { + this.set = this.get = $.noop; + } + } + _.mixin(LruCache.prototype, { + set: function set(key, val) { + var tailItem = this.list.tail, node; + if (this.size >= this.maxSize) { + this.list.remove(tailItem); + delete this.hash[tailItem.key]; + this.size--; + } + if (node = this.hash[key]) { + node.val = val; + this.list.moveToFront(node); + } else { + node = new Node(key, val); + this.list.add(node); + this.hash[key] = node; + this.size++; + } + }, + get: function get(key) { + var node = this.hash[key]; + if (node) { + this.list.moveToFront(node); + return node.val; + } + }, + reset: function reset() { + this.size = 0; + this.hash = {}; + this.list = new List(); + } + }); + function List() { + this.head = this.tail = null; + } + _.mixin(List.prototype, { + add: function add(node) { + if (this.head) { + node.next = this.head; + this.head.prev = node; + } + this.head = node; + this.tail = this.tail || node; + }, + remove: function remove(node) { + node.prev ? node.prev.next = node.next : this.head = node.next; + node.next ? 
node.next.prev = node.prev : this.tail = node.prev; + }, + moveToFront: function(node) { + this.remove(node); + this.add(node); + } + }); + function Node(key, val) { + this.key = key; + this.val = val; + this.prev = this.next = null; + } + return LruCache; + }(); + var PersistentStorage = function() { + "use strict"; + var LOCAL_STORAGE; + try { + LOCAL_STORAGE = window.localStorage; + LOCAL_STORAGE.setItem("~~~", "!"); + LOCAL_STORAGE.removeItem("~~~"); + } catch (err) { + LOCAL_STORAGE = null; + } + function PersistentStorage(namespace, override) { + this.prefix = [ "__", namespace, "__" ].join(""); + this.ttlKey = "__ttl__"; + this.keyMatcher = new RegExp("^" + _.escapeRegExChars(this.prefix)); + this.ls = override || LOCAL_STORAGE; + !this.ls && this._noop(); + } + _.mixin(PersistentStorage.prototype, { + _prefix: function(key) { + return this.prefix + key; + }, + _ttlKey: function(key) { + return this._prefix(key) + this.ttlKey; + }, + _noop: function() { + this.get = this.set = this.remove = this.clear = this.isExpired = _.noop; + }, + _safeSet: function(key, val) { + try { + this.ls.setItem(key, val); + } catch (err) { + if (err.name === "QuotaExceededError") { + this.clear(); + this._noop(); + } + } + }, + get: function(key) { + if (this.isExpired(key)) { + this.remove(key); + } + return decode(this.ls.getItem(this._prefix(key))); + }, + set: function(key, val, ttl) { + if (_.isNumber(ttl)) { + this._safeSet(this._ttlKey(key), encode(now() + ttl)); + } else { + this.ls.removeItem(this._ttlKey(key)); + } + return this._safeSet(this._prefix(key), encode(val)); + }, + remove: function(key) { + this.ls.removeItem(this._ttlKey(key)); + this.ls.removeItem(this._prefix(key)); + return this; + }, + clear: function() { + var i, keys = gatherMatchingKeys(this.keyMatcher); + for (i = keys.length; i--; ) { + this.remove(keys[i]); + } + return this; + }, + isExpired: function(key) { + var ttl = decode(this.ls.getItem(this._ttlKey(key))); + return _.isNumber(ttl) && 
now() > ttl ? true : false; + } + }); + return PersistentStorage; + function now() { + return new Date().getTime(); + } + function encode(val) { + return JSON.stringify(_.isUndefined(val) ? null : val); + } + function decode(val) { + return $.parseJSON(val); + } + function gatherMatchingKeys(keyMatcher) { + var i, key, keys = [], len = LOCAL_STORAGE.length; + for (i = 0; i < len; i++) { + if ((key = LOCAL_STORAGE.key(i)).match(keyMatcher)) { + keys.push(key.replace(keyMatcher, "")); + } + } + return keys; + } + }(); + var Transport = function() { + "use strict"; + var pendingRequestsCount = 0, pendingRequests = {}, maxPendingRequests = 6, sharedCache = new LruCache(10); + function Transport(o) { + o = o || {}; + this.cancelled = false; + this.lastReq = null; + this._send = o.transport; + this._get = o.limiter ? o.limiter(this._get) : this._get; + this._cache = o.cache === false ? new LruCache(0) : sharedCache; + } + Transport.setMaxPendingRequests = function setMaxPendingRequests(num) { + maxPendingRequests = num; + }; + Transport.resetCache = function resetCache() { + sharedCache.reset(); + }; + _.mixin(Transport.prototype, { + _fingerprint: function fingerprint(o) { + o = o || {}; + return o.url + o.type + $.param(o.data || {}); + }, + _get: function(o, cb) { + var that = this, fingerprint, jqXhr; + fingerprint = this._fingerprint(o); + if (this.cancelled || fingerprint !== this.lastReq) { + return; + } + if (jqXhr = pendingRequests[fingerprint]) { + jqXhr.done(done).fail(fail); + } else if (pendingRequestsCount < maxPendingRequests) { + pendingRequestsCount++; + pendingRequests[fingerprint] = this._send(o).done(done).fail(fail).always(always); + } else { + this.onDeckRequestArgs = [].slice.call(arguments, 0); + } + function done(resp) { + cb(null, resp); + that._cache.set(fingerprint, resp); + } + function fail() { + cb(true); + } + function always() { + pendingRequestsCount--; + delete pendingRequests[fingerprint]; + if (that.onDeckRequestArgs) { + 
that._get.apply(that, that.onDeckRequestArgs); + that.onDeckRequestArgs = null; + } + } + }, + get: function(o, cb) { + var resp, fingerprint; + cb = cb || $.noop; + o = _.isString(o) ? { + url: o + } : o || {}; + fingerprint = this._fingerprint(o); + this.cancelled = false; + this.lastReq = fingerprint; + if (resp = this._cache.get(fingerprint)) { + cb(null, resp); + } else { + this._get(o, cb); + } + }, + cancel: function() { + this.cancelled = true; + } + }); + return Transport; + }(); + var SearchIndex = window.SearchIndex = function() { + "use strict"; + var CHILDREN = "c", IDS = "i"; + function SearchIndex(o) { + o = o || {}; + if (!o.datumTokenizer || !o.queryTokenizer) { + $.error("datumTokenizer and queryTokenizer are both required"); + } + this.identify = o.identify || _.stringify; + this.datumTokenizer = o.datumTokenizer; + this.queryTokenizer = o.queryTokenizer; + this.reset(); + } + _.mixin(SearchIndex.prototype, { + bootstrap: function bootstrap(o) { + this.datums = o.datums; + this.trie = o.trie; + }, + add: function(data) { + var that = this; + data = _.isArray(data) ? 
data : [ data ]; + _.each(data, function(datum) { + var id, tokens; + that.datums[id = that.identify(datum)] = datum; + tokens = normalizeTokens(that.datumTokenizer(datum)); + _.each(tokens, function(token) { + var node, chars, ch; + node = that.trie; + chars = token.split(""); + while (ch = chars.shift()) { + node = node[CHILDREN][ch] || (node[CHILDREN][ch] = newNode()); + node[IDS].push(id); + } + }); + }); + }, + get: function get(ids) { + var that = this; + return _.map(ids, function(id) { + return that.datums[id]; + }); + }, + search: function search(query) { + var that = this, tokens, matches; + tokens = normalizeTokens(this.queryTokenizer(query)); + _.each(tokens, function(token) { + var node, chars, ch, ids; + if (matches && matches.length === 0) { + return false; + } + node = that.trie; + chars = token.split(""); + while (node && (ch = chars.shift())) { + node = node[CHILDREN][ch]; + } + if (node && chars.length === 0) { + ids = node[IDS].slice(0); + matches = matches ? getIntersection(matches, ids) : ids; + } else { + matches = []; + return false; + } + }); + return matches ? 
_.map(unique(matches), function(id) { + return that.datums[id]; + }) : []; + }, + all: function all() { + var values = []; + for (var key in this.datums) { + values.push(this.datums[key]); + } + return values; + }, + reset: function reset() { + this.datums = {}; + this.trie = newNode(); + }, + serialize: function serialize() { + return { + datums: this.datums, + trie: this.trie + }; + } + }); + return SearchIndex; + function normalizeTokens(tokens) { + tokens = _.filter(tokens, function(token) { + return !!token; + }); + tokens = _.map(tokens, function(token) { + return token.toLowerCase(); + }); + return tokens; + } + function newNode() { + var node = {}; + node[IDS] = []; + node[CHILDREN] = {}; + return node; + } + function unique(array) { + var seen = {}, uniques = []; + for (var i = 0, len = array.length; i < len; i++) { + if (!seen[array[i]]) { + seen[array[i]] = true; + uniques.push(array[i]); + } + } + return uniques; + } + function getIntersection(arrayA, arrayB) { + var ai = 0, bi = 0, intersection = []; + arrayA = arrayA.sort(); + arrayB = arrayB.sort(); + var lenArrayA = arrayA.length, lenArrayB = arrayB.length; + while (ai < lenArrayA && bi < lenArrayB) { + if (arrayA[ai] < arrayB[bi]) { + ai++; + } else if (arrayA[ai] > arrayB[bi]) { + bi++; + } else { + intersection.push(arrayA[ai]); + ai++; + bi++; + } + } + return intersection; + } + }(); + var Prefetch = function() { + "use strict"; + var keys; + keys = { + data: "data", + protocol: "protocol", + thumbprint: "thumbprint" + }; + function Prefetch(o) { + this.url = o.url; + this.ttl = o.ttl; + this.cache = o.cache; + this.prepare = o.prepare; + this.transform = o.transform; + this.transport = o.transport; + this.thumbprint = o.thumbprint; + this.storage = new PersistentStorage(o.cacheKey); + } + _.mixin(Prefetch.prototype, { + _settings: function settings() { + return { + url: this.url, + type: "GET", + dataType: "json" + }; + }, + store: function store(data) { + if (!this.cache) { + return; + } + 
this.storage.set(keys.data, data, this.ttl); + this.storage.set(keys.protocol, location.protocol, this.ttl); + this.storage.set(keys.thumbprint, this.thumbprint, this.ttl); + }, + fromCache: function fromCache() { + var stored = {}, isExpired; + if (!this.cache) { + return null; + } + stored.data = this.storage.get(keys.data); + stored.protocol = this.storage.get(keys.protocol); + stored.thumbprint = this.storage.get(keys.thumbprint); + isExpired = stored.thumbprint !== this.thumbprint || stored.protocol !== location.protocol; + return stored.data && !isExpired ? stored.data : null; + }, + fromNetwork: function(cb) { + var that = this, settings; + if (!cb) { + return; + } + settings = this.prepare(this._settings()); + this.transport(settings).fail(onError).done(onResponse); + function onError() { + cb(true); + } + function onResponse(resp) { + cb(null, that.transform(resp)); + } + }, + clear: function clear() { + this.storage.clear(); + return this; + } + }); + return Prefetch; + }(); + var Remote = function() { + "use strict"; + function Remote(o) { + this.url = o.url; + this.prepare = o.prepare; + this.transform = o.transform; + this.transport = new Transport({ + cache: o.cache, + limiter: o.limiter, + transport: o.transport + }); + } + _.mixin(Remote.prototype, { + _settings: function settings() { + return { + url: this.url, + type: "GET", + dataType: "json" + }; + }, + get: function get(query, cb) { + var that = this, settings; + if (!cb) { + return; + } + query = query || ""; + settings = this.prepare(query, this._settings()); + return this.transport.get(settings, onResponse); + function onResponse(err, resp) { + err ? 
cb([]) : cb(that.transform(resp)); + } + }, + cancelLastRequest: function cancelLastRequest() { + this.transport.cancel(); + } + }); + return Remote; + }(); + var oParser = function() { + "use strict"; + return function parse(o) { + var defaults, sorter; + defaults = { + initialize: true, + identify: _.stringify, + datumTokenizer: null, + queryTokenizer: null, + sufficient: 5, + sorter: null, + local: [], + prefetch: null, + remote: null + }; + o = _.mixin(defaults, o || {}); + !o.datumTokenizer && $.error("datumTokenizer is required"); + !o.queryTokenizer && $.error("queryTokenizer is required"); + sorter = o.sorter; + o.sorter = sorter ? function(x) { + return x.sort(sorter); + } : _.identity; + o.local = _.isFunction(o.local) ? o.local() : o.local; + o.prefetch = parsePrefetch(o.prefetch); + o.remote = parseRemote(o.remote); + return o; + }; + function parsePrefetch(o) { + var defaults; + if (!o) { + return null; + } + defaults = { + url: null, + ttl: 24 * 60 * 60 * 1e3, + cache: true, + cacheKey: null, + thumbprint: "", + prepare: _.identity, + transform: _.identity, + transport: null + }; + o = _.isString(o) ? { + url: o + } : o; + o = _.mixin(defaults, o); + !o.url && $.error("prefetch requires url to be set"); + o.transform = o.filter || o.transform; + o.cacheKey = o.cacheKey || o.url; + o.thumbprint = VERSION + o.thumbprint; + o.transport = o.transport ? callbackToDeferred(o.transport) : $.ajax; + return o; + } + function parseRemote(o) { + var defaults; + if (!o) { + return; + } + defaults = { + url: null, + cache: true, + prepare: null, + replace: null, + wildcard: null, + limiter: null, + rateLimitBy: "debounce", + rateLimitWait: 300, + transform: _.identity, + transport: null + }; + o = _.isString(o) ? { + url: o + } : o; + o = _.mixin(defaults, o); + !o.url && $.error("remote requires url to be set"); + o.transform = o.filter || o.transform; + o.prepare = toRemotePrepare(o); + o.limiter = toLimiter(o); + o.transport = o.transport ? 
callbackToDeferred(o.transport) : $.ajax; + delete o.replace; + delete o.wildcard; + delete o.rateLimitBy; + delete o.rateLimitWait; + return o; + } + function toRemotePrepare(o) { + var prepare, replace, wildcard; + prepare = o.prepare; + replace = o.replace; + wildcard = o.wildcard; + if (prepare) { + return prepare; + } + if (replace) { + prepare = prepareByReplace; + } else if (o.wildcard) { + prepare = prepareByWildcard; + } else { + prepare = idenityPrepare; + } + return prepare; + function prepareByReplace(query, settings) { + settings.url = replace(settings.url, query); + return settings; + } + function prepareByWildcard(query, settings) { + settings.url = settings.url.replace(wildcard, encodeURIComponent(query)); + return settings; + } + function idenityPrepare(query, settings) { + return settings; + } + } + function toLimiter(o) { + var limiter, method, wait; + limiter = o.limiter; + method = o.rateLimitBy; + wait = o.rateLimitWait; + if (!limiter) { + limiter = /^throttle$/i.test(method) ? throttle(wait) : debounce(wait); + } + return limiter; + function debounce(wait) { + return function debounce(fn) { + return _.debounce(fn, wait); + }; + } + function throttle(wait) { + return function throttle(fn) { + return _.throttle(fn, wait); + }; + } + } + function callbackToDeferred(fn) { + return function wrapper(o) { + var deferred = $.Deferred(); + fn(o, onSuccess, onError); + return deferred; + function onSuccess(resp) { + _.defer(function() { + deferred.resolve(resp); + }); + } + function onError(err) { + _.defer(function() { + deferred.reject(err); + }); + } + }; + } + }(); + var Bloodhound = function() { + "use strict"; + var old; + old = window && window.Bloodhound; + function Bloodhound(o) { + o = oParser(o); + this.sorter = o.sorter; + this.identify = o.identify; + this.sufficient = o.sufficient; + this.local = o.local; + this.remote = o.remote ? new Remote(o.remote) : null; + this.prefetch = o.prefetch ? 
new Prefetch(o.prefetch) : null; + this.index = new SearchIndex({ + identify: this.identify, + datumTokenizer: o.datumTokenizer, + queryTokenizer: o.queryTokenizer + }); + o.initialize !== false && this.initialize(); + } + Bloodhound.noConflict = function noConflict() { + window && (window.Bloodhound = old); + return Bloodhound; + }; + Bloodhound.tokenizers = tokenizers; + _.mixin(Bloodhound.prototype, { + __ttAdapter: function ttAdapter() { + var that = this; + return this.remote ? withAsync : withoutAsync; + function withAsync(query, sync, async) { + return that.search(query, sync, async); + } + function withoutAsync(query, sync) { + return that.search(query, sync); + } + }, + _loadPrefetch: function loadPrefetch() { + var that = this, deferred, serialized; + deferred = $.Deferred(); + if (!this.prefetch) { + deferred.resolve(); + } else if (serialized = this.prefetch.fromCache()) { + this.index.bootstrap(serialized); + deferred.resolve(); + } else { + this.prefetch.fromNetwork(done); + } + return deferred.promise(); + function done(err, data) { + if (err) { + return deferred.reject(); + } + that.add(data); + that.prefetch.store(that.index.serialize()); + deferred.resolve(); + } + }, + _initialize: function initialize() { + var that = this, deferred; + this.clear(); + (this.initPromise = this._loadPrefetch()).done(addLocalToIndex); + return this.initPromise; + function addLocalToIndex() { + that.add(that.local); + } + }, + initialize: function initialize(force) { + return !this.initPromise || force ? this._initialize() : this.initPromise; + }, + add: function add(data) { + this.index.add(data); + return this; + }, + get: function get(ids) { + ids = _.isArray(ids) ? ids : [].slice.call(arguments); + return this.index.get(ids); + }, + search: function search(query, sync, async) { + var that = this, local; + local = this.sorter(this.index.search(query)); + sync(this.remote ? 
local.slice() : local); + if (this.remote && local.length < this.sufficient) { + this.remote.get(query, processRemote); + } else if (this.remote) { + this.remote.cancelLastRequest(); + } + return this; + function processRemote(remote) { + var nonDuplicates = []; + _.each(remote, function(r) { + !_.some(local, function(l) { + return that.identify(r) === that.identify(l); + }) && nonDuplicates.push(r); + }); + async && async(nonDuplicates); + } + }, + all: function all() { + return this.index.all(); + }, + clear: function clear() { + this.index.reset(); + return this; + }, + clearPrefetchCache: function clearPrefetchCache() { + this.prefetch && this.prefetch.clear(); + return this; + }, + clearRemoteCache: function clearRemoteCache() { + Transport.resetCache(); + return this; + }, + ttAdapter: function ttAdapter() { + return this.__ttAdapter(); + } + }); + return Bloodhound; + }(); + return Bloodhound; +}); + +(function(root, factory) { + if (typeof define === "function" && define.amd) { + define("typeahead.js", [ "jquery" ], function(a0) { + return factory(a0); + }); + } else if (typeof exports === "object") { + module.exports = factory(require("jquery")); + } else { + factory(jQuery); + } +})(this, function($) { + var _ = function() { + "use strict"; + return { + isMsie: function() { + return /(msie|trident)/i.test(navigator.userAgent) ? 
navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false; + }, + isBlankString: function(str) { + return !str || /^\s*$/.test(str); + }, + escapeRegExChars: function(str) { + return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&"); + }, + isString: function(obj) { + return typeof obj === "string"; + }, + isNumber: function(obj) { + return typeof obj === "number"; + }, + isArray: $.isArray, + isFunction: $.isFunction, + isObject: $.isPlainObject, + isUndefined: function(obj) { + return typeof obj === "undefined"; + }, + isElement: function(obj) { + return !!(obj && obj.nodeType === 1); + }, + isJQuery: function(obj) { + return obj instanceof $; + }, + toStr: function toStr(s) { + return _.isUndefined(s) || s === null ? "" : s + ""; + }, + bind: $.proxy, + each: function(collection, cb) { + $.each(collection, reverseArgs); + function reverseArgs(index, value) { + return cb(value, index); + } + }, + map: $.map, + filter: $.grep, + every: function(obj, test) { + var result = true; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (!(result = test.call(null, val, key, obj))) { + return false; + } + }); + return !!result; + }, + some: function(obj, test) { + var result = false; + if (!obj) { + return result; + } + $.each(obj, function(key, val) { + if (result = test.call(null, val, key, obj)) { + return false; + } + }); + return !!result; + }, + mixin: $.extend, + identity: function(x) { + return x; + }, + clone: function(obj) { + return $.extend(true, {}, obj); + }, + getIdGenerator: function() { + var counter = 0; + return function() { + return counter++; + }; + }, + templatify: function templatify(obj) { + return $.isFunction(obj) ? 
obj : template; + function template() { + return String(obj); + } + }, + defer: function(fn) { + setTimeout(fn, 0); + }, + debounce: function(func, wait, immediate) { + var timeout, result; + return function() { + var context = this, args = arguments, later, callNow; + later = function() { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + } + }; + callNow = immediate && !timeout; + clearTimeout(timeout); + timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + } + return result; + }; + }, + throttle: function(func, wait) { + var context, args, timeout, result, previous, later; + previous = 0; + later = function() { + previous = new Date(); + timeout = null; + result = func.apply(context, args); + }; + return function() { + var now = new Date(), remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0) { + clearTimeout(timeout); + timeout = null; + previous = now; + result = func.apply(context, args); + } else if (!timeout) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }, + stringify: function(val) { + return _.isString(val) ? 
val : JSON.stringify(val); + }, + noop: function() {} + }; + }(); + var WWW = function() { + "use strict"; + var defaultClassNames = { + wrapper: "twitter-typeahead", + input: "tt-input", + hint: "tt-hint", + menu: "tt-menu", + dataset: "tt-dataset", + suggestion: "tt-suggestion", + selectable: "tt-selectable", + empty: "tt-empty", + open: "tt-open", + cursor: "tt-cursor", + highlight: "tt-highlight" + }; + return build; + function build(o) { + var www, classes; + classes = _.mixin({}, defaultClassNames, o); + www = { + css: buildCss(), + classes: classes, + html: buildHtml(classes), + selectors: buildSelectors(classes) + }; + return { + css: www.css, + html: www.html, + classes: www.classes, + selectors: www.selectors, + mixin: function(o) { + _.mixin(o, www); + } + }; + } + function buildHtml(c) { + return { + wrapper: '', + menu: '
    ' + }; + } + function buildSelectors(classes) { + var selectors = {}; + _.each(classes, function(v, k) { + selectors[k] = "." + v; + }); + return selectors; + } + function buildCss() { + var css = { + wrapper: { + position: "relative", + display: "inline-block" + }, + hint: { + position: "absolute", + top: "0", + left: "0", + borderColor: "transparent", + boxShadow: "none", + opacity: "1" + }, + input: { + position: "relative", + verticalAlign: "top", + backgroundColor: "transparent" + }, + inputWithNoHint: { + position: "relative", + verticalAlign: "top" + }, + menu: { + position: "absolute", + top: "100%", + left: "0", + zIndex: "100", + display: "none" + }, + ltr: { + left: "0", + right: "auto" + }, + rtl: { + left: "auto", + right: " 0" + } + }; + if (_.isMsie()) { + _.mixin(css.input, { + backgroundImage: "url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)" + }); + } + return css; + } + }(); + var EventBus = function() { + "use strict"; + var namespace, deprecationMap; + namespace = "typeahead:"; + deprecationMap = { + render: "rendered", + cursorchange: "cursorchanged", + select: "selected", + autocomplete: "autocompleted" + }; + function EventBus(o) { + if (!o || !o.el) { + $.error("EventBus initialized without el"); + } + this.$el = $(o.el); + } + _.mixin(EventBus.prototype, { + _trigger: function(type, args) { + var $e; + $e = $.Event(namespace + type); + (args = args || []).unshift($e); + this.$el.trigger.apply(this.$el, args); + return $e; + }, + before: function(type) { + var args, $e; + args = [].slice.call(arguments, 1); + $e = this._trigger("before" + type, args); + return $e.isDefaultPrevented(); + }, + trigger: function(type) { + var deprecatedType; + this._trigger(type, [].slice.call(arguments, 1)); + if (deprecatedType = deprecationMap[type]) { + this._trigger(deprecatedType, [].slice.call(arguments, 1)); + } + } + }); + return EventBus; + }(); + var EventEmitter = function() { + "use strict"; + var splitter = 
/\s+/, nextTick = getNextTick(); + return { + onSync: onSync, + onAsync: onAsync, + off: off, + trigger: trigger + }; + function on(method, types, cb, context) { + var type; + if (!cb) { + return this; + } + types = types.split(splitter); + cb = context ? bindContext(cb, context) : cb; + this._callbacks = this._callbacks || {}; + while (type = types.shift()) { + this._callbacks[type] = this._callbacks[type] || { + sync: [], + async: [] + }; + this._callbacks[type][method].push(cb); + } + return this; + } + function onAsync(types, cb, context) { + return on.call(this, "async", types, cb, context); + } + function onSync(types, cb, context) { + return on.call(this, "sync", types, cb, context); + } + function off(types) { + var type; + if (!this._callbacks) { + return this; + } + types = types.split(splitter); + while (type = types.shift()) { + delete this._callbacks[type]; + } + return this; + } + function trigger(types) { + var type, callbacks, args, syncFlush, asyncFlush; + if (!this._callbacks) { + return this; + } + types = types.split(splitter); + args = [].slice.call(arguments, 1); + while ((type = types.shift()) && (callbacks = this._callbacks[type])) { + syncFlush = getFlush(callbacks.sync, this, [ type ].concat(args)); + asyncFlush = getFlush(callbacks.async, this, [ type ].concat(args)); + syncFlush() && nextTick(asyncFlush); + } + return this; + } + function getFlush(callbacks, context, args) { + return flush; + function flush() { + var cancelled; + for (var i = 0, len = callbacks.length; !cancelled && i < len; i += 1) { + cancelled = callbacks[i].apply(context, args) === false; + } + return !cancelled; + } + } + function getNextTick() { + var nextTickFn; + if (window.setImmediate) { + nextTickFn = function nextTickSetImmediate(fn) { + setImmediate(function() { + fn(); + }); + }; + } else { + nextTickFn = function nextTickSetTimeout(fn) { + setTimeout(function() { + fn(); + }, 0); + }; + } + return nextTickFn; + } + function bindContext(fn, context) { + 
return fn.bind ? fn.bind(context) : function() { + fn.apply(context, [].slice.call(arguments, 0)); + }; + } + }(); + var highlight = function(doc) { + "use strict"; + var defaults = { + node: null, + pattern: null, + tagName: "strong", + className: null, + wordsOnly: false, + caseSensitive: false + }; + return function hightlight(o) { + var regex; + o = _.mixin({}, defaults, o); + if (!o.node || !o.pattern) { + return; + } + o.pattern = _.isArray(o.pattern) ? o.pattern : [ o.pattern ]; + regex = getRegex(o.pattern, o.caseSensitive, o.wordsOnly); + traverse(o.node, hightlightTextNode); + function hightlightTextNode(textNode) { + var match, patternNode, wrapperNode; + if (match = regex.exec(textNode.data)) { + wrapperNode = doc.createElement(o.tagName); + o.className && (wrapperNode.className = o.className); + patternNode = textNode.splitText(match.index); + patternNode.splitText(match[0].length); + wrapperNode.appendChild(patternNode.cloneNode(true)); + textNode.parentNode.replaceChild(wrapperNode, patternNode); + } + return !!match; + } + function traverse(el, hightlightTextNode) { + var childNode, TEXT_NODE_TYPE = 3; + for (var i = 0; i < el.childNodes.length; i++) { + childNode = el.childNodes[i]; + if (childNode.nodeType === TEXT_NODE_TYPE) { + i += hightlightTextNode(childNode) ? 1 : 0; + } else { + traverse(childNode, hightlightTextNode); + } + } + } + }; + function getRegex(patterns, caseSensitive, wordsOnly) { + var escapedPatterns = [], regexStr; + for (var i = 0, len = patterns.length; i < len; i++) { + escapedPatterns.push(_.escapeRegExChars(patterns[i])); + } + regexStr = wordsOnly ? "\\b(" + escapedPatterns.join("|") + ")\\b" : "(" + escapedPatterns.join("|") + ")"; + return caseSensitive ? 
new RegExp(regexStr) : new RegExp(regexStr, "i"); + } + }(window.document); + var Input = function() { + "use strict"; + var specialKeyCodeMap; + specialKeyCodeMap = { + 9: "tab", + 27: "esc", + 37: "left", + 39: "right", + 13: "enter", + 38: "up", + 40: "down" + }; + function Input(o, www) { + o = o || {}; + if (!o.input) { + $.error("input is missing"); + } + www.mixin(this); + this.$hint = $(o.hint); + this.$input = $(o.input); + this.query = this.$input.val(); + this.queryWhenFocused = this.hasFocus() ? this.query : null; + this.$overflowHelper = buildOverflowHelper(this.$input); + this._checkLanguageDirection(); + if (this.$hint.length === 0) { + this.setHint = this.getHint = this.clearHint = this.clearHintIfInvalid = _.noop; + } + } + Input.normalizeQuery = function(str) { + return _.toStr(str).replace(/^\s*/g, "").replace(/\s{2,}/g, " "); + }; + _.mixin(Input.prototype, EventEmitter, { + _onBlur: function onBlur() { + this.resetInputValue(); + this.trigger("blurred"); + }, + _onFocus: function onFocus() { + this.queryWhenFocused = this.query; + this.trigger("focused"); + }, + _onKeydown: function onKeydown($e) { + var keyName = specialKeyCodeMap[$e.which || $e.keyCode]; + this._managePreventDefault(keyName, $e); + if (keyName && this._shouldTrigger(keyName, $e)) { + this.trigger(keyName + "Keyed", $e); + } + }, + _onInput: function onInput() { + this._setQuery(this.getInputValue()); + this.clearHintIfInvalid(); + this._checkLanguageDirection(); + }, + _managePreventDefault: function managePreventDefault(keyName, $e) { + var preventDefault; + switch (keyName) { + case "up": + case "down": + preventDefault = !withModifier($e); + break; + + default: + preventDefault = false; + } + preventDefault && $e.preventDefault(); + }, + _shouldTrigger: function shouldTrigger(keyName, $e) { + var trigger; + switch (keyName) { + case "tab": + trigger = !withModifier($e); + break; + + default: + trigger = true; + } + return trigger; + }, + _checkLanguageDirection: function 
checkLanguageDirection() { + var dir = (this.$input.css("direction") || "ltr").toLowerCase(); + if (this.dir !== dir) { + this.dir = dir; + this.$hint.attr("dir", dir); + this.trigger("langDirChanged", dir); + } + }, + _setQuery: function setQuery(val, silent) { + var areEquivalent, hasDifferentWhitespace; + areEquivalent = areQueriesEquivalent(val, this.query); + hasDifferentWhitespace = areEquivalent ? this.query.length !== val.length : false; + this.query = val; + if (!silent && !areEquivalent) { + this.trigger("queryChanged", this.query); + } else if (!silent && hasDifferentWhitespace) { + this.trigger("whitespaceChanged", this.query); + } + }, + bind: function() { + var that = this, onBlur, onFocus, onKeydown, onInput; + onBlur = _.bind(this._onBlur, this); + onFocus = _.bind(this._onFocus, this); + onKeydown = _.bind(this._onKeydown, this); + onInput = _.bind(this._onInput, this); + this.$input.on("blur.tt", onBlur).on("focus.tt", onFocus).on("keydown.tt", onKeydown); + if (!_.isMsie() || _.isMsie() > 9) { + this.$input.on("input.tt", onInput); + } else { + this.$input.on("keydown.tt keypress.tt cut.tt paste.tt", function($e) { + if (specialKeyCodeMap[$e.which || $e.keyCode]) { + return; + } + _.defer(_.bind(that._onInput, that, $e)); + }); + } + return this; + }, + focus: function focus() { + this.$input.focus(); + }, + blur: function blur() { + this.$input.blur(); + }, + getLangDir: function getLangDir() { + return this.dir; + }, + getQuery: function getQuery() { + return this.query || ""; + }, + setQuery: function setQuery(val, silent) { + this.setInputValue(val); + this._setQuery(val, silent); + }, + hasQueryChangedSinceLastFocus: function hasQueryChangedSinceLastFocus() { + return this.query !== this.queryWhenFocused; + }, + getInputValue: function getInputValue() { + return this.$input.val(); + }, + setInputValue: function setInputValue(value) { + this.$input.val(value); + this.clearHintIfInvalid(); + this._checkLanguageDirection(); + }, + 
resetInputValue: function resetInputValue() { + this.setInputValue(this.query); + }, + getHint: function getHint() { + return this.$hint.val(); + }, + setHint: function setHint(value) { + this.$hint.val(value); + }, + clearHint: function clearHint() { + this.setHint(""); + }, + clearHintIfInvalid: function clearHintIfInvalid() { + var val, hint, valIsPrefixOfHint, isValid; + val = this.getInputValue(); + hint = this.getHint(); + valIsPrefixOfHint = val !== hint && hint.indexOf(val) === 0; + isValid = val !== "" && valIsPrefixOfHint && !this.hasOverflow(); + !isValid && this.clearHint(); + }, + hasFocus: function hasFocus() { + return this.$input.is(":focus"); + }, + hasOverflow: function hasOverflow() { + var constraint = this.$input.width() - 2; + this.$overflowHelper.text(this.getInputValue()); + return this.$overflowHelper.width() >= constraint; + }, + isCursorAtEnd: function() { + var valueLength, selectionStart, range; + valueLength = this.$input.val().length; + selectionStart = this.$input[0].selectionStart; + if (_.isNumber(selectionStart)) { + return selectionStart === valueLength; + } else if (document.selection) { + range = document.selection.createRange(); + range.moveStart("character", -valueLength); + return valueLength === range.text.length; + } + return true; + }, + destroy: function destroy() { + this.$hint.off(".tt"); + this.$input.off(".tt"); + this.$overflowHelper.remove(); + this.$hint = this.$input = this.$overflowHelper = $("
    "); + } + }); + return Input; + function buildOverflowHelper($input) { + return $('').css({ + position: "absolute", + visibility: "hidden", + whiteSpace: "pre", + fontFamily: $input.css("font-family"), + fontSize: $input.css("font-size"), + fontStyle: $input.css("font-style"), + fontVariant: $input.css("font-variant"), + fontWeight: $input.css("font-weight"), + wordSpacing: $input.css("word-spacing"), + letterSpacing: $input.css("letter-spacing"), + textIndent: $input.css("text-indent"), + textRendering: $input.css("text-rendering"), + textTransform: $input.css("text-transform") + }).insertAfter($input); + } + function areQueriesEquivalent(a, b) { + return Input.normalizeQuery(a) === Input.normalizeQuery(b); + } + function withModifier($e) { + return $e.altKey || $e.ctrlKey || $e.metaKey || $e.shiftKey; + } + }(); + var Dataset = function() { + "use strict"; + var keys, nameGenerator; + keys = { + val: "tt-selectable-display", + obj: "tt-selectable-object" + }; + nameGenerator = _.getIdGenerator(); + function Dataset(o, www) { + o = o || {}; + o.templates = o.templates || {}; + o.templates.notFound = o.templates.notFound || o.templates.empty; + if (!o.source) { + $.error("missing source"); + } + if (!o.node) { + $.error("missing node"); + } + if (o.name && !isValidName(o.name)) { + $.error("invalid dataset name: " + o.name); + } + www.mixin(this); + this.highlight = !!o.highlight; + this.name = o.name || nameGenerator(); + this.limit = o.limit || 5; + this.displayFn = getDisplayFn(o.display || o.displayKey); + this.templates = getTemplates(o.templates, this.displayFn); + this.source = o.source.__ttAdapter ? o.source.__ttAdapter() : o.source; + this.async = _.isUndefined(o.async) ? 
this.source.length > 2 : !!o.async; + this._resetLastSuggestion(); + this.$el = $(o.node).addClass(this.classes.dataset).addClass(this.classes.dataset + "-" + this.name); + } + Dataset.extractData = function extractData(el) { + var $el = $(el); + if ($el.data(keys.obj)) { + return { + val: $el.data(keys.val) || "", + obj: $el.data(keys.obj) || null + }; + } + return null; + }; + _.mixin(Dataset.prototype, EventEmitter, { + _overwrite: function overwrite(query, suggestions) { + suggestions = suggestions || []; + if (suggestions.length) { + this._renderSuggestions(query, suggestions); + } else if (this.async && this.templates.pending) { + this._renderPending(query); + } else if (!this.async && this.templates.notFound) { + this._renderNotFound(query); + } else { + this._empty(); + } + this.trigger("rendered", this.name, suggestions, false); + }, + _append: function append(query, suggestions) { + suggestions = suggestions || []; + if (suggestions.length && this.$lastSuggestion.length) { + this._appendSuggestions(query, suggestions); + } else if (suggestions.length) { + this._renderSuggestions(query, suggestions); + } else if (!this.$lastSuggestion.length && this.templates.notFound) { + this._renderNotFound(query); + } + this.trigger("rendered", this.name, suggestions, true); + }, + _renderSuggestions: function renderSuggestions(query, suggestions) { + var $fragment; + $fragment = this._getSuggestionsFragment(query, suggestions); + this.$lastSuggestion = $fragment.children().last(); + this.$el.html($fragment).prepend(this._getHeader(query, suggestions)).append(this._getFooter(query, suggestions)); + }, + _appendSuggestions: function appendSuggestions(query, suggestions) { + var $fragment, $lastSuggestion; + $fragment = this._getSuggestionsFragment(query, suggestions); + $lastSuggestion = $fragment.children().last(); + this.$lastSuggestion.after($fragment); + this.$lastSuggestion = $lastSuggestion; + }, + _renderPending: function renderPending(query) { + var template = 
this.templates.pending; + this._resetLastSuggestion(); + template && this.$el.html(template({ + query: query, + dataset: this.name + })); + }, + _renderNotFound: function renderNotFound(query) { + var template = this.templates.notFound; + this._resetLastSuggestion(); + template && this.$el.html(template({ + query: query, + dataset: this.name + })); + }, + _empty: function empty() { + this.$el.empty(); + this._resetLastSuggestion(); + }, + _getSuggestionsFragment: function getSuggestionsFragment(query, suggestions) { + var that = this, fragment; + fragment = document.createDocumentFragment(); + _.each(suggestions, function getSuggestionNode(suggestion) { + var $el, context; + context = that._injectQuery(query, suggestion); + $el = $(that.templates.suggestion(context)).data(keys.obj, suggestion).data(keys.val, that.displayFn(suggestion)).addClass(that.classes.suggestion + " " + that.classes.selectable); + fragment.appendChild($el[0]); + }); + this.highlight && highlight({ + className: this.classes.highlight, + node: fragment, + pattern: query + }); + return $(fragment); + }, + _getFooter: function getFooter(query, suggestions) { + return this.templates.footer ? this.templates.footer({ + query: query, + suggestions: suggestions, + dataset: this.name + }) : null; + }, + _getHeader: function getHeader(query, suggestions) { + return this.templates.header ? this.templates.header({ + query: query, + suggestions: suggestions, + dataset: this.name + }) : null; + }, + _resetLastSuggestion: function resetLastSuggestion() { + this.$lastSuggestion = $(); + }, + _injectQuery: function injectQuery(query, obj) { + return _.isObject(obj) ? 
_.mixin({ + _query: query + }, obj) : obj; + }, + update: function update(query) { + var that = this, canceled = false, syncCalled = false, rendered = 0; + this.cancel(); + this.cancel = function cancel() { + canceled = true; + that.cancel = $.noop; + that.async && that.trigger("asyncCanceled", query); + }; + this.source(query, sync, async); + !syncCalled && sync([]); + function sync(suggestions) { + if (syncCalled) { + return; + } + syncCalled = true; + suggestions = (suggestions || []).slice(0, that.limit); + rendered = suggestions.length; + that._overwrite(query, suggestions); + if (rendered < that.limit && that.async) { + that.trigger("asyncRequested", query); + } + } + function async(suggestions) { + suggestions = suggestions || []; + if (!canceled && rendered < that.limit) { + that.cancel = $.noop; + rendered += suggestions.length; + that._append(query, suggestions.slice(0, that.limit - rendered)); + that.async && that.trigger("asyncReceived", query); + } + } + }, + cancel: $.noop, + clear: function clear() { + this._empty(); + this.cancel(); + this.trigger("cleared"); + }, + isEmpty: function isEmpty() { + return this.$el.is(":empty"); + }, + destroy: function destroy() { + this.$el = $("
    "); + } + }); + return Dataset; + function getDisplayFn(display) { + display = display || _.stringify; + return _.isFunction(display) ? display : displayFn; + function displayFn(obj) { + return obj[display]; + } + } + function getTemplates(templates, displayFn) { + return { + notFound: templates.notFound && _.templatify(templates.notFound), + pending: templates.pending && _.templatify(templates.pending), + header: templates.header && _.templatify(templates.header), + footer: templates.footer && _.templatify(templates.footer), + suggestion: templates.suggestion || suggestionTemplate + }; + function suggestionTemplate(context) { + return $("
    ").text(displayFn(context)); + } + } + function isValidName(str) { + return /^[_a-zA-Z0-9-]+$/.test(str); + } + }(); + var Menu = function() { + "use strict"; + function Menu(o, www) { + var that = this; + o = o || {}; + if (!o.node) { + $.error("node is required"); + } + www.mixin(this); + this.$node = $(o.node); + this.query = null; + this.datasets = _.map(o.datasets, initializeDataset); + function initializeDataset(oDataset) { + var node = that.$node.find(oDataset.node).first(); + oDataset.node = node.length ? node : $("
    ").appendTo(that.$node); + return new Dataset(oDataset, www); + } + } + _.mixin(Menu.prototype, EventEmitter, { + _onSelectableClick: function onSelectableClick($e) { + this.trigger("selectableClicked", $($e.currentTarget)); + }, + _onRendered: function onRendered(type, dataset, suggestions, async) { + this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty()); + this.trigger("datasetRendered", dataset, suggestions, async); + }, + _onCleared: function onCleared() { + this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty()); + this.trigger("datasetCleared"); + }, + _propagate: function propagate() { + this.trigger.apply(this, arguments); + }, + _allDatasetsEmpty: function allDatasetsEmpty() { + return _.every(this.datasets, isDatasetEmpty); + function isDatasetEmpty(dataset) { + return dataset.isEmpty(); + } + }, + _getSelectables: function getSelectables() { + return this.$node.find(this.selectors.selectable); + }, + _removeCursor: function _removeCursor() { + var $selectable = this.getActiveSelectable(); + $selectable && $selectable.removeClass(this.classes.cursor); + }, + _ensureVisible: function ensureVisible($el) { + var elTop, elBottom, nodeScrollTop, nodeHeight; + elTop = $el.position().top; + elBottom = elTop + $el.outerHeight(true); + nodeScrollTop = this.$node.scrollTop(); + nodeHeight = this.$node.height() + parseInt(this.$node.css("paddingTop"), 10) + parseInt(this.$node.css("paddingBottom"), 10); + if (elTop < 0) { + this.$node.scrollTop(nodeScrollTop + elTop); + } else if (nodeHeight < elBottom) { + this.$node.scrollTop(nodeScrollTop + (elBottom - nodeHeight)); + } + }, + bind: function() { + var that = this, onSelectableClick; + onSelectableClick = _.bind(this._onSelectableClick, this); + this.$node.on("click.tt", this.selectors.selectable, onSelectableClick); + _.each(this.datasets, function(dataset) { + dataset.onSync("asyncRequested", that._propagate, that).onSync("asyncCanceled", that._propagate, 
that).onSync("asyncReceived", that._propagate, that).onSync("rendered", that._onRendered, that).onSync("cleared", that._onCleared, that); + }); + return this; + }, + isOpen: function isOpen() { + return this.$node.hasClass(this.classes.open); + }, + open: function open() { + this.$node.addClass(this.classes.open); + }, + close: function close() { + this.$node.removeClass(this.classes.open); + this._removeCursor(); + }, + setLanguageDirection: function setLanguageDirection(dir) { + this.$node.attr("dir", dir); + }, + selectableRelativeToCursor: function selectableRelativeToCursor(delta) { + var $selectables, $oldCursor, oldIndex, newIndex; + $oldCursor = this.getActiveSelectable(); + $selectables = this._getSelectables(); + oldIndex = $oldCursor ? $selectables.index($oldCursor) : -1; + newIndex = oldIndex + delta; + newIndex = (newIndex + 1) % ($selectables.length + 1) - 1; + newIndex = newIndex < -1 ? $selectables.length - 1 : newIndex; + return newIndex === -1 ? null : $selectables.eq(newIndex); + }, + setCursor: function setCursor($selectable) { + this._removeCursor(); + if ($selectable = $selectable && $selectable.first()) { + $selectable.addClass(this.classes.cursor); + this._ensureVisible($selectable); + } + }, + getSelectableData: function getSelectableData($el) { + return $el && $el.length ? Dataset.extractData($el) : null; + }, + getActiveSelectable: function getActiveSelectable() { + var $selectable = this._getSelectables().filter(this.selectors.cursor).first(); + return $selectable.length ? $selectable : null; + }, + getTopSelectable: function getTopSelectable() { + var $selectable = this._getSelectables().first(); + return $selectable.length ? 
$selectable : null; + }, + update: function update(query) { + var isValidUpdate = query !== this.query; + if (isValidUpdate) { + this.query = query; + _.each(this.datasets, updateDataset); + } + return isValidUpdate; + function updateDataset(dataset) { + dataset.update(query); + } + }, + empty: function empty() { + _.each(this.datasets, clearDataset); + this.query = null; + this.$node.addClass(this.classes.empty); + function clearDataset(dataset) { + dataset.clear(); + } + }, + destroy: function destroy() { + this.$node.off(".tt"); + this.$node = $("
    "); + _.each(this.datasets, destroyDataset); + function destroyDataset(dataset) { + dataset.destroy(); + } + } + }); + return Menu; + }(); + var DefaultMenu = function() { + "use strict"; + var s = Menu.prototype; + function DefaultMenu() { + Menu.apply(this, [].slice.call(arguments, 0)); + } + _.mixin(DefaultMenu.prototype, Menu.prototype, { + open: function open() { + !this._allDatasetsEmpty() && this._show(); + return s.open.apply(this, [].slice.call(arguments, 0)); + }, + close: function close() { + this._hide(); + return s.close.apply(this, [].slice.call(arguments, 0)); + }, + _onRendered: function onRendered() { + if (this._allDatasetsEmpty()) { + this._hide(); + } else { + this.isOpen() && this._show(); + } + return s._onRendered.apply(this, [].slice.call(arguments, 0)); + }, + _onCleared: function onCleared() { + if (this._allDatasetsEmpty()) { + this._hide(); + } else { + this.isOpen() && this._show(); + } + return s._onCleared.apply(this, [].slice.call(arguments, 0)); + }, + setLanguageDirection: function setLanguageDirection(dir) { + this.$node.css(dir === "ltr" ? this.css.ltr : this.css.rtl); + return s.setLanguageDirection.apply(this, [].slice.call(arguments, 0)); + }, + _hide: function hide() { + this.$node.hide(); + }, + _show: function show() { + this.$node.css("display", "block"); + } + }); + return DefaultMenu; + }(); + var Typeahead = function() { + "use strict"; + function Typeahead(o, www) { + var onFocused, onBlurred, onEnterKeyed, onTabKeyed, onEscKeyed, onUpKeyed, onDownKeyed, onLeftKeyed, onRightKeyed, onQueryChanged, onWhitespaceChanged; + o = o || {}; + if (!o.input) { + $.error("missing input"); + } + if (!o.menu) { + $.error("missing menu"); + } + if (!o.eventBus) { + $.error("missing event bus"); + } + www.mixin(this); + this.eventBus = o.eventBus; + this.minLength = _.isNumber(o.minLength) ? 
o.minLength : 1; + this.input = o.input; + this.menu = o.menu; + this.enabled = true; + this.active = false; + this.input.hasFocus() && this.activate(); + this.dir = this.input.getLangDir(); + this._hacks(); + this.menu.bind().onSync("selectableClicked", this._onSelectableClicked, this).onSync("asyncRequested", this._onAsyncRequested, this).onSync("asyncCanceled", this._onAsyncCanceled, this).onSync("asyncReceived", this._onAsyncReceived, this).onSync("datasetRendered", this._onDatasetRendered, this).onSync("datasetCleared", this._onDatasetCleared, this); + onFocused = c(this, "activate", "open", "_onFocused"); + onBlurred = c(this, "deactivate", "_onBlurred"); + onEnterKeyed = c(this, "isActive", "isOpen", "_onEnterKeyed"); + onTabKeyed = c(this, "isActive", "isOpen", "_onTabKeyed"); + onEscKeyed = c(this, "isActive", "_onEscKeyed"); + onUpKeyed = c(this, "isActive", "open", "_onUpKeyed"); + onDownKeyed = c(this, "isActive", "open", "_onDownKeyed"); + onLeftKeyed = c(this, "isActive", "isOpen", "_onLeftKeyed"); + onRightKeyed = c(this, "isActive", "isOpen", "_onRightKeyed"); + onQueryChanged = c(this, "_openIfActive", "_onQueryChanged"); + onWhitespaceChanged = c(this, "_openIfActive", "_onWhitespaceChanged"); + this.input.bind().onSync("focused", onFocused, this).onSync("blurred", onBlurred, this).onSync("enterKeyed", onEnterKeyed, this).onSync("tabKeyed", onTabKeyed, this).onSync("escKeyed", onEscKeyed, this).onSync("upKeyed", onUpKeyed, this).onSync("downKeyed", onDownKeyed, this).onSync("leftKeyed", onLeftKeyed, this).onSync("rightKeyed", onRightKeyed, this).onSync("queryChanged", onQueryChanged, this).onSync("whitespaceChanged", onWhitespaceChanged, this).onSync("langDirChanged", this._onLangDirChanged, this); + } + _.mixin(Typeahead.prototype, { + _hacks: function hacks() { + var $input, $menu; + $input = this.input.$input || $("
    "); + $menu = this.menu.$node || $("
    "); + $input.on("blur.tt", function($e) { + var active, isActive, hasActive; + active = document.activeElement; + isActive = $menu.is(active); + hasActive = $menu.has(active).length > 0; + if (_.isMsie() && (isActive || hasActive)) { + $e.preventDefault(); + $e.stopImmediatePropagation(); + _.defer(function() { + $input.focus(); + }); + } + }); + $menu.on("mousedown.tt", function($e) { + $e.preventDefault(); + }); + }, + _onSelectableClicked: function onSelectableClicked(type, $el) { + this.select($el); + }, + _onDatasetCleared: function onDatasetCleared() { + this._updateHint(); + }, + _onDatasetRendered: function onDatasetRendered(type, dataset, suggestions, async) { + this._updateHint(); + this.eventBus.trigger("render", suggestions, async, dataset); + }, + _onAsyncRequested: function onAsyncRequested(type, dataset, query) { + this.eventBus.trigger("asyncrequest", query, dataset); + }, + _onAsyncCanceled: function onAsyncCanceled(type, dataset, query) { + this.eventBus.trigger("asynccancel", query, dataset); + }, + _onAsyncReceived: function onAsyncReceived(type, dataset, query) { + this.eventBus.trigger("asyncreceive", query, dataset); + }, + _onFocused: function onFocused() { + this._minLengthMet() && this.menu.update(this.input.getQuery()); + }, + _onBlurred: function onBlurred() { + if (this.input.hasQueryChangedSinceLastFocus()) { + this.eventBus.trigger("change", this.input.getQuery()); + } + }, + _onEnterKeyed: function onEnterKeyed(type, $e) { + var $selectable; + if ($selectable = this.menu.getActiveSelectable()) { + this.select($selectable) && $e.preventDefault(); + } + }, + _onTabKeyed: function onTabKeyed(type, $e) { + var $selectable; + if ($selectable = this.menu.getActiveSelectable()) { + this.select($selectable) && $e.preventDefault(); + } else if ($selectable = this.menu.getTopSelectable()) { + this.autocomplete($selectable) && $e.preventDefault(); + } + }, + _onEscKeyed: function onEscKeyed() { + this.close(); + }, + _onUpKeyed: function 
onUpKeyed() { + this.moveCursor(-1); + }, + _onDownKeyed: function onDownKeyed() { + this.moveCursor(+1); + }, + _onLeftKeyed: function onLeftKeyed() { + if (this.dir === "rtl" && this.input.isCursorAtEnd()) { + this.autocomplete(this.menu.getTopSelectable()); + } + }, + _onRightKeyed: function onRightKeyed() { + if (this.dir === "ltr" && this.input.isCursorAtEnd()) { + this.autocomplete(this.menu.getTopSelectable()); + } + }, + _onQueryChanged: function onQueryChanged(e, query) { + this._minLengthMet(query) ? this.menu.update(query) : this.menu.empty(); + }, + _onWhitespaceChanged: function onWhitespaceChanged() { + this._updateHint(); + }, + _onLangDirChanged: function onLangDirChanged(e, dir) { + if (this.dir !== dir) { + this.dir = dir; + this.menu.setLanguageDirection(dir); + } + }, + _openIfActive: function openIfActive() { + this.isActive() && this.open(); + }, + _minLengthMet: function minLengthMet(query) { + query = _.isString(query) ? query : this.input.getQuery() || ""; + return query.length >= this.minLength; + }, + _updateHint: function updateHint() { + var $selectable, data, val, query, escapedQuery, frontMatchRegEx, match; + $selectable = this.menu.getTopSelectable(); + data = this.menu.getSelectableData($selectable); + val = this.input.getInputValue(); + if (data && !_.isBlankString(val) && !this.input.hasOverflow()) { + query = Input.normalizeQuery(val); + escapedQuery = _.escapeRegExChars(query); + frontMatchRegEx = new RegExp("^(?:" + escapedQuery + ")(.+$)", "i"); + match = frontMatchRegEx.exec(data.val); + match && this.input.setHint(val + match[1]); + } else { + this.input.clearHint(); + } + }, + isEnabled: function isEnabled() { + return this.enabled; + }, + enable: function enable() { + this.enabled = true; + }, + disable: function disable() { + this.enabled = false; + }, + isActive: function isActive() { + return this.active; + }, + activate: function activate() { + if (this.isActive()) { + return true; + } else if (!this.isEnabled() || 
this.eventBus.before("active")) { + return false; + } else { + this.active = true; + this.eventBus.trigger("active"); + return true; + } + }, + deactivate: function deactivate() { + if (!this.isActive()) { + return true; + } else if (this.eventBus.before("idle")) { + return false; + } else { + this.active = false; + this.close(); + this.eventBus.trigger("idle"); + return true; + } + }, + isOpen: function isOpen() { + return this.menu.isOpen(); + }, + open: function open() { + if (!this.isOpen() && !this.eventBus.before("open")) { + this.menu.open(); + this._updateHint(); + this.eventBus.trigger("open"); + } + return this.isOpen(); + }, + close: function close() { + if (this.isOpen() && !this.eventBus.before("close")) { + this.menu.close(); + this.input.clearHint(); + this.input.resetInputValue(); + this.eventBus.trigger("close"); + } + return !this.isOpen(); + }, + setVal: function setVal(val) { + this.input.setQuery(_.toStr(val)); + }, + getVal: function getVal() { + return this.input.getQuery(); + }, + select: function select($selectable) { + var data = this.menu.getSelectableData($selectable); + if (data && !this.eventBus.before("select", data.obj)) { + this.input.setQuery(data.val, true); + this.eventBus.trigger("select", data.obj); + this.close(); + return true; + } + return false; + }, + autocomplete: function autocomplete($selectable) { + var query, data, isValid; + query = this.input.getQuery(); + data = this.menu.getSelectableData($selectable); + isValid = data && query !== data.val; + if (isValid && !this.eventBus.before("autocomplete", data.obj)) { + this.input.setQuery(data.val); + this.eventBus.trigger("autocomplete", data.obj); + return true; + } + return false; + }, + moveCursor: function moveCursor(delta) { + var query, $candidate, data, payload, cancelMove; + query = this.input.getQuery(); + $candidate = this.menu.selectableRelativeToCursor(delta); + data = this.menu.getSelectableData($candidate); + payload = data ? 
data.obj : null; + cancelMove = this._minLengthMet() && this.menu.update(query); + if (!cancelMove && !this.eventBus.before("cursorchange", payload)) { + this.menu.setCursor($candidate); + if (data) { + this.input.setInputValue(data.val); + } else { + this.input.resetInputValue(); + this._updateHint(); + } + this.eventBus.trigger("cursorchange", payload); + return true; + } + return false; + }, + destroy: function destroy() { + this.input.destroy(); + this.menu.destroy(); + } + }); + return Typeahead; + function c(ctx) { + var methods = [].slice.call(arguments, 1); + return function() { + var args = [].slice.call(arguments); + _.each(methods, function(method) { + return ctx[method].apply(ctx, args); + }); + }; + } + }(); + (function() { + "use strict"; + var old, keys, methods; + old = $.fn.typeahead; + keys = { + www: "tt-www", + attrs: "tt-attrs", + typeahead: "tt-typeahead" + }; + methods = { + initialize: function initialize(o, datasets) { + var www; + datasets = _.isArray(datasets) ? datasets : [].slice.call(arguments, 1); + o = o || {}; + www = WWW(o.classNames); + return this.each(attach); + function attach() { + var $input, $wrapper, $hint, $menu, defaultHint, defaultMenu, eventBus, input, menu, typeahead, MenuConstructor; + _.each(datasets, function(d) { + d.highlight = !!o.highlight; + }); + $input = $(this); + $wrapper = $(www.html.wrapper); + $hint = $elOrNull(o.hint); + $menu = $elOrNull(o.menu); + defaultHint = o.hint !== false && !$hint; + defaultMenu = o.menu !== false && !$menu; + defaultHint && ($hint = buildHintFromInput($input, www)); + defaultMenu && ($menu = $(www.html.menu).css(www.css.menu)); + $hint && $hint.val(""); + $input = prepInput($input, www); + if (defaultHint || defaultMenu) { + $wrapper.css(www.css.wrapper); + $input.css(defaultHint ? www.css.input : www.css.inputWithNoHint); + $input.wrap($wrapper).parent().prepend(defaultHint ? $hint : null).append(defaultMenu ? $menu : null); + } + MenuConstructor = defaultMenu ? 
DefaultMenu : Menu; + eventBus = new EventBus({ + el: $input + }); + input = new Input({ + hint: $hint, + input: $input + }, www); + menu = new MenuConstructor({ + node: $menu, + datasets: datasets + }, www); + typeahead = new Typeahead({ + input: input, + menu: menu, + eventBus: eventBus, + minLength: o.minLength + }, www); + $input.data(keys.www, www); + $input.data(keys.typeahead, typeahead); + } + }, + isEnabled: function isEnabled() { + var enabled; + ttEach(this.first(), function(t) { + enabled = t.isEnabled(); + }); + return enabled; + }, + enable: function enable() { + ttEach(this, function(t) { + t.enable(); + }); + return this; + }, + disable: function disable() { + ttEach(this, function(t) { + t.disable(); + }); + return this; + }, + isActive: function isActive() { + var active; + ttEach(this.first(), function(t) { + active = t.isActive(); + }); + return active; + }, + activate: function activate() { + ttEach(this, function(t) { + t.activate(); + }); + return this; + }, + deactivate: function deactivate() { + ttEach(this, function(t) { + t.deactivate(); + }); + return this; + }, + isOpen: function isOpen() { + var open; + ttEach(this.first(), function(t) { + open = t.isOpen(); + }); + return open; + }, + open: function open() { + ttEach(this, function(t) { + t.open(); + }); + return this; + }, + close: function close() { + ttEach(this, function(t) { + t.close(); + }); + return this; + }, + select: function select(el) { + var success = false, $el = $(el); + ttEach(this.first(), function(t) { + success = t.select($el); + }); + return success; + }, + autocomplete: function autocomplete(el) { + var success = false, $el = $(el); + ttEach(this.first(), function(t) { + success = t.autocomplete($el); + }); + return success; + }, + moveCursor: function moveCursoe(delta) { + var success = false; + ttEach(this.first(), function(t) { + success = t.moveCursor(delta); + }); + return success; + }, + val: function val(newVal) { + var query; + if (!arguments.length) { + 
ttEach(this.first(), function(t) { + query = t.getVal(); + }); + return query; + } else { + ttEach(this, function(t) { + t.setVal(newVal); + }); + return this; + } + }, + destroy: function destroy() { + ttEach(this, function(typeahead, $input) { + revert($input); + typeahead.destroy(); + }); + return this; + } + }; + $.fn.typeahead = function(method) { + if (methods[method]) { + return methods[method].apply(this, [].slice.call(arguments, 1)); + } else { + return methods.initialize.apply(this, arguments); + } + }; + $.fn.typeahead.noConflict = function noConflict() { + $.fn.typeahead = old; + return this; + }; + function ttEach($els, fn) { + $els.each(function() { + var $input = $(this), typeahead; + (typeahead = $input.data(keys.typeahead)) && fn(typeahead, $input); + }); + } + function buildHintFromInput($input, www) { + return $input.clone().addClass(www.classes.hint).removeData().css(www.css.hint).css(getBackgroundStyles($input)).prop("readonly", true).removeAttr("id name placeholder required").attr({ + autocomplete: "off", + spellcheck: "false", + tabindex: -1 + }); + } + function prepInput($input, www) { + $input.data(keys.attrs, { + dir: $input.attr("dir"), + autocomplete: $input.attr("autocomplete"), + spellcheck: $input.attr("spellcheck"), + style: $input.attr("style") + }); + $input.addClass(www.classes.input).attr({ + autocomplete: "off", + spellcheck: false + }); + try { + !$input.attr("dir") && $input.attr("dir", "auto"); + } catch (e) {} + return $input; + } + function getBackgroundStyles($el) { + return { + backgroundAttachment: $el.css("background-attachment"), + backgroundClip: $el.css("background-clip"), + backgroundColor: $el.css("background-color"), + backgroundImage: $el.css("background-image"), + backgroundOrigin: $el.css("background-origin"), + backgroundPosition: $el.css("background-position"), + backgroundRepeat: $el.css("background-repeat"), + backgroundSize: $el.css("background-size") + }; + } + function revert($input) { + var www, 
$wrapper; + www = $input.data(keys.www); + $wrapper = $input.parent().filter(www.selectors.wrapper); + _.each($input.data(keys.attrs), function(val, key) { + _.isUndefined(val) ? $input.removeAttr(key) : $input.attr(key, val); + }); + $input.removeData(keys.typeahead).removeData(keys.www).removeData(keys.attr).removeClass(www.classes.input); + if ($wrapper.length) { + $input.detach().insertAfter($wrapper); + $wrapper.remove(); + } + } + function $elOrNull(obj) { + var isValid, $el; + isValid = _.isJQuery(obj) || _.isElement(obj); + $el = isValid ? $(obj).first() : []; + return $el.length ? $el : null; + } + })(); +}); \ No newline at end of file diff --git a/debian/dconv/parser/__init__.py b/debian/dconv/parser/__init__.py new file mode 100644 index 0000000..82b8522 --- /dev/null +++ b/debian/dconv/parser/__init__.py @@ -0,0 +1,81 @@ +__all__ = [ + 'arguments', + 'example', + 'keyword', + 'seealso', + 'table', + 'underline' +] + + +class Parser: + def __init__(self, pctxt): + self.pctxt = pctxt + + def parse(self, line): + return line + +class PContext: + def __init__(self, templates = None): + self.set_content_list([]) + self.templates = templates + + def set_content(self, content): + self.set_content_list(content.split("\n")) + + def set_content_list(self, content): + self.lines = content + self.nblines = len(self.lines) + self.i = 0 + self.stop = False + + def get_lines(self): + return self.lines + + def eat_lines(self): + count = 0 + while self.has_more_lines() and self.lines[self.i].strip(): + count += 1 + self.next() + return count + + def eat_empty_lines(self): + count = 0 + while self.has_more_lines() and not self.lines[self.i].strip(): + count += 1 + self.next() + return count + + def next(self, count=1): + self.i += count + + def has_more_lines(self, offset=0): + return self.i + offset < self.nblines + + def get_line(self, offset=0): + return self.lines[self.i + offset].rstrip() + + +# Get the indentation of a line +def get_indent(line): + indent = 0 + 
length = len(line) + while indent < length and line[indent] == ' ': + indent += 1 + return indent + + +# Remove unneeded indentation +def remove_indent(list): + # Detect the minimum indentation in the list + min_indent = -1 + for line in list: + if not line.strip(): + continue + indent = get_indent(line) + if min_indent < 0 or indent < min_indent: + min_indent = indent + # Realign the list content to remove the minimum indentation + if min_indent > 0: + for index, line in enumerate(list): + list[index] = line[min_indent:] diff --git a/debian/dconv/parser/arguments.py b/debian/dconv/parser/arguments.py new file mode 100644 index 0000000..c11f711 --- /dev/null +++ b/debian/dconv/parser/arguments.py @@ -0,0 +1,132 @@ +import sys +import re +import parser + +''' +TODO: Allow inner data parsing (this will allow to parse the examples provided in an arguments block) +''' +class Parser(parser.Parser): + def __init__(self, pctxt): + parser.Parser.__init__(self, pctxt) + #template = pctxt.templates.get_template("parser/arguments.tpl") + #self.replace = template.render().strip() + + def parse(self, line): + #return re.sub(r'(Arguments *:)', self.replace, line) + pctxt = self.pctxt + + result = re.search(r'(Arguments? *:)', line) + if result: + label = result.group(0) + content = [] + + desc_indent = False + desc = re.sub(r'.*Arguments? 
*:', '', line).strip() + + indent = parser.get_indent(line) + + pctxt.next() + pctxt.eat_empty_lines() + + arglines = [] + if desc != "none": + add_empty_lines = 0 + while pctxt.has_more_lines() and (parser.get_indent(pctxt.get_line()) > indent): + for j in xrange(0, add_empty_lines): + arglines.append("") + arglines.append(pctxt.get_line()) + pctxt.next() + add_empty_lines = pctxt.eat_empty_lines() + ''' + print line + + if parser.get_indent(line) == arg_indent: + argument = re.sub(r' *([^ ]+).*', r'\1', line) + if argument: + #content.append("%s" % argument) + arg_desc = [line.replace(argument, " " * len(self.unescape(argument)), 1)] + #arg_desc = re.sub(r'( *)([^ ]+)(.*)', r'\1\2\3', line) + arg_desc_indent = parser.get_indent(arg_desc[0]) + arg_desc[0] = arg_desc[0][arg_indent:] + pctxt.next() + add_empty_lines = 0 + while pctxt.has_more_lines and parser.get_indent(pctxt.get_line()) >= arg_indent: + for i in xrange(0, add_empty_lines): + arg_desc.append("") + arg_desc.append(pctxt.get_line()[arg_indent:]) + pctxt.next() + add_empty_lines = pctxt.eat_empty_lines() + # TODO : reduce space at the beginnning + content.append({ + 'name': argument, + 'desc': arg_desc + }) + ''' + + if arglines: + new_arglines = [] + #content = self.parse_args(arglines) + parser.remove_indent(arglines) + ''' + pctxt2 = parser.PContext(pctxt.templates) + pctxt2.set_content_list(arglines) + while pctxt2.has_more_lines(): + new_arglines.append(parser.example.Parser(pctxt2).parse(pctxt2.get_line())) + pctxt2.next() + arglines = new_arglines + ''' + + pctxt.stop = True + + template = pctxt.templates.get_template("parser/arguments.tpl") + return template.render( + pctxt=pctxt, + label=label, + desc=desc, + content=arglines + #content=content + ) + return line + + return line + +''' + def parse_args(self, data): + args = [] + + pctxt = parser.PContext() + pctxt.set_content_list(data) + + while pctxt.has_more_lines(): + line = pctxt.get_line() + arg_indent = parser.get_indent(line) + argument 
= re.sub(r' *([^ ]+).*', r'\1', line) + if True or argument: + arg_desc = [] + trailing_desc = line.replace(argument, " " * len(self.unescape(argument)), 1)[arg_indent:] + if trailing_desc.strip(): + arg_desc.append(trailing_desc) + pctxt.next() + add_empty_lines = 0 + while pctxt.has_more_lines() and parser.get_indent(pctxt.get_line()) > arg_indent: + for i in xrange(0, add_empty_lines): + arg_desc.append("") + arg_desc.append(pctxt.get_line()[arg_indent:]) + pctxt.next() + add_empty_lines = pctxt.eat_empty_lines() + + parser.remove_indent(arg_desc) + + args.append({ + 'name': argument, + 'desc': arg_desc + }) + return args + + def unescape(self, s): + s = s.replace("<", "<") + s = s.replace(">", ">") + # this has to be last: + s = s.replace("&", "&") + return s +''' diff --git a/debian/dconv/parser/example.py b/debian/dconv/parser/example.py new file mode 100644 index 0000000..0b339a4 --- /dev/null +++ b/debian/dconv/parser/example.py @@ -0,0 +1,77 @@ +import re +import parser + +# Detect examples blocks +class Parser(parser.Parser): + def __init__(self, pctxt): + parser.Parser.__init__(self, pctxt) + template = pctxt.templates.get_template("parser/example/comment.tpl") + self.comment = template.render(pctxt=pctxt).strip() + + + def parse(self, line): + pctxt = self.pctxt + + result = re.search(r'^ *(Examples? 
*:)(.*)', line) + if result: + label = result.group(1) + + desc_indent = False + desc = result.group(2).strip() + + # Some examples have a description + if desc: + desc_indent = len(line) - len(desc) + + indent = parser.get_indent(line) + + if desc: + # And some description are on multiple lines + while pctxt.get_line(1) and parser.get_indent(pctxt.get_line(1)) == desc_indent: + desc += " " + pctxt.get_line(1).strip() + pctxt.next() + + pctxt.next() + add_empty_line = pctxt.eat_empty_lines() + + content = [] + + if parser.get_indent(pctxt.get_line()) > indent: + if desc: + desc = desc[0].upper() + desc[1:] + add_empty_line = 0 + while pctxt.has_more_lines() and ((not pctxt.get_line()) or (parser.get_indent(pctxt.get_line()) > indent)): + if pctxt.get_line(): + for j in xrange(0, add_empty_line): + content.append("") + + content.append(re.sub(r'(#.*)$', self.comment, pctxt.get_line())) + add_empty_line = 0 + else: + add_empty_line += 1 + pctxt.next() + elif parser.get_indent(pctxt.get_line()) == indent: + # Simple example that can't have empty lines + if add_empty_line and desc: + # This means that the example was on the same line as the 'Example' tag + # and was not a description + content.append(" " * indent + desc) + desc = False + else: + while pctxt.has_more_lines() and (parser.get_indent(pctxt.get_line()) >= indent): + content.append(pctxt.get_line()) + pctxt.next() + pctxt.eat_empty_lines() # Skip empty remaining lines + + pctxt.stop = True + + parser.remove_indent(content) + + template = pctxt.templates.get_template("parser/example.tpl") + return template.render( + pctxt=pctxt, + label=label, + desc=desc, + content=content + ) + return line diff --git a/debian/dconv/parser/keyword.py b/debian/dconv/parser/keyword.py new file mode 100644 index 0000000..5f861fb --- /dev/null +++ b/debian/dconv/parser/keyword.py @@ -0,0 +1,142 @@ +import re +import parser +from urllib import quote + +class Parser(parser.Parser): + def __init__(self, pctxt): + 
parser.Parser.__init__(self, pctxt) + self.keywordPattern = re.compile(r'^(%s%s)(%s)' % ( + '([a-z][a-z0-9\-\+_\.]*[a-z0-9\-\+_)])', # keyword + '( [a-z0-9\-_]+)*', # subkeywords + '(\([^ ]*\))?', # arg (ex: (), (/), (,[,]) ... + )) + + def parse(self, line): + pctxt = self.pctxt + keywords = pctxt.keywords + keywordsCount = pctxt.keywordsCount + chapters = pctxt.chapters + + res = "" + + if line != "" and not re.match(r'^ ', line): + parsed = self.keywordPattern.match(line) + if parsed != None: + keyword = parsed.group(1) + arg = parsed.group(4) + parameters = line[len(keyword) + len(arg):] + if (parameters != "" and not re.match("^ +((<|\[|\{|/).*|(: [a-z +]+))?(\(deprecated\))?$", parameters)): + # Dirty hack + # - parameters should only start with the characer "<", "[", "{", "/" + # - or a column (":") followed by a alpha keywords to identify fetching samples (optionally separated by the character "+") + # - or the string "(deprecated)" at the end + keyword = False + else: + splitKeyword = keyword.split(" ") + + parameters = arg + parameters + else: + keyword = False + + if keyword and (len(splitKeyword) <= 5): + toplevel = pctxt.details["toplevel"] + for j in xrange(0, len(splitKeyword)): + subKeyword = " ".join(splitKeyword[0:j + 1]) + if subKeyword != "no": + if not subKeyword in keywords: + keywords[subKeyword] = set() + keywords[subKeyword].add(pctxt.details["chapter"]) + res += '' % subKeyword + res += '' % (toplevel, subKeyword) + res += '' % (pctxt.details["chapter"], subKeyword) + res += '' % (subKeyword, chapters[toplevel]['title']) + res += '' % (subKeyword, chapters[pctxt.details["chapter"]]['title']) + + deprecated = parameters.find("(deprecated)") + if deprecated != -1: + prefix = "" + suffix = "" + parameters = parameters.replace("(deprecated)", '(deprecated)') + else: + prefix = "" + suffix = "" + + nextline = pctxt.get_line(1) + + while nextline.startswith(" "): + # Found parameters on the next line + parameters += "\n" + nextline + 
pctxt.next() + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + else: + nextline = "" + + + parameters = self.colorize(parameters) + res += '
    %s%s%s%s
    ' % (prefix, keyword, quote("%s-%s" % (pctxt.details["chapter"], keyword)), keyword, parameters, suffix) + pctxt.next() + pctxt.stop = True + elif line.startswith("/*"): + # Skip comments in the documentation + while not pctxt.get_line().endswith("*/"): + pctxt.next() + pctxt.next() + else: + # This is probably not a keyword but a text, ignore it + res += line + else: + res += line + + return res + + # Used to colorize keywords parameters + # TODO : use CSS styling + def colorize(self, text): + colorized = "" + tags = [ + [ "[" , "]" , "#008" ], + [ "{" , "}" , "#800" ], + [ "<", ">", "#080" ], + ] + heap = [] + pos = 0 + while pos < len(text): + substring = text[pos:] + found = False + for tag in tags: + if substring.startswith(tag[0]): + # Opening tag + heap.append(tag) + colorized += '%s' % (tag[2], substring[0:len(tag[0])]) + pos += len(tag[0]) + found = True + break + elif substring.startswith(tag[1]): + # Closing tag + + # pop opening tags until the corresponding one is found + openingTag = False + while heap and openingTag != tag: + openingTag = heap.pop() + if openingTag != tag: + colorized += '' + # all intermediate tags are now closed, we can display the tag + colorized += substring[0:len(tag[1])] + # and the close it if it was previously opened + if openingTag == tag: + colorized += '' + pos += len(tag[1]) + found = True + break + if not found: + colorized += substring[0] + pos += 1 + # close all unterminated tags + while heap: + tag = heap.pop() + colorized += '' + + return colorized + + diff --git a/debian/dconv/parser/seealso.py b/debian/dconv/parser/seealso.py new file mode 100644 index 0000000..bbb53f9 --- /dev/null +++ b/debian/dconv/parser/seealso.py @@ -0,0 +1,32 @@ +import re +import parser + +class Parser(parser.Parser): + def parse(self, line): + pctxt = self.pctxt + + result = re.search(r'(See also *:)', line) + if result: + label = result.group(0) + + desc = re.sub(r'.*See also *:', '', line).strip() + + indent = parser.get_indent(line) 
+ + # Some descriptions are on multiple lines + while pctxt.has_more_lines(1) and parser.get_indent(pctxt.get_line(1)) >= indent: + desc += " " + pctxt.get_line(1).strip() + pctxt.next() + + pctxt.eat_empty_lines() + pctxt.next() + pctxt.stop = True + + template = pctxt.templates.get_template("parser/seealso.tpl") + return template.render( + pctxt=pctxt, + label=label, + desc=desc, + ) + + return line diff --git a/debian/dconv/parser/table.py b/debian/dconv/parser/table.py new file mode 100644 index 0000000..c7dad0b --- /dev/null +++ b/debian/dconv/parser/table.py @@ -0,0 +1,244 @@ +import re +import sys +import parser + +class Parser(parser.Parser): + def __init__(self, pctxt): + parser.Parser.__init__(self, pctxt) + self.table1Pattern = re.compile(r'^ *(-+\+)+-+') + self.table2Pattern = re.compile(r'^ *\+(-+\+)+') + + def parse(self, line): + global document, keywords, keywordsCount, chapters, keyword_conflicts + + pctxt = self.pctxt + + if pctxt.context['headers']['subtitle'] != 'Configuration Manual': + # Quick exit + return line + elif pctxt.details['chapter'] == "4": + # BUG: the matrix in chapter 4. Proxies is not well displayed, we skip this chapter + return line + + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + else: + nextline = "" + + if self.table1Pattern.match(nextline): + # activate table rendering only for the Configuration Manual + lineSeparator = nextline + nbColumns = nextline.count("+") + 1 + extraColumns = 0 + print >> sys.stderr, "Entering table mode (%d columns)" % nbColumns + table = [] + if line.find("|") != -1: + row = [] + while pctxt.has_more_lines(): + line = pctxt.get_line() + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + else: + nextline = "" + if line == lineSeparator: + # New row + table.append(row) + row = [] + if nextline.find("|") == -1: + break # End of table + else: + # Data + columns = line.split("|") + for j in xrange(0, len(columns)): + try: + if row[j]: + row[j] += "
    " + row[j] += columns[j].strip() + except: + row.append(columns[j].strip()) + pctxt.next() + else: + row = [] + headers = nextline + while pctxt.has_more_lines(): + line = pctxt.get_line() + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + else: + nextline = "" + + if nextline == "": + if row: table.append(row) + break # End of table + + if (line != lineSeparator) and (line[0] != "-"): + start = 0 + + if row and not line.startswith(" "): + # Row is complete, parse a new one + table.append(row) + row = [] + + tmprow = [] + while start != -1: + end = headers.find("+", start) + if end == -1: + end = len(headers) + + realend = end + if realend == len(headers): + realend = len(line) + else: + while realend < len(line) and line[realend] != " ": + realend += 1 + end += 1 + + tmprow.append(line[start:realend]) + + start = end + 1 + if start >= len(headers): + start = -1 + for j in xrange(0, nbColumns): + try: + row[j] += tmprow[j].strip() + except: + row.append(tmprow[j].strip()) + + deprecated = row[0].endswith("(deprecated)") + if deprecated: + row[0] = row[0][: -len("(deprecated)")].rstrip() + + nooption = row[1].startswith("(*)") + if nooption: + row[1] = row[1][len("(*)"):].strip() + + if deprecated or nooption: + extraColumns = 1 + extra = "" + if deprecated: + extra += '(deprecated)' + if nooption: + extra += '(*)' + row.append(extra) + + pctxt.next() + print >> sys.stderr, "Leaving table mode" + pctxt.next() # skip useless next line + pctxt.stop = True + + return self.renderTable(table, nbColumns, pctxt.details["toplevel"]) + # elif self.table2Pattern.match(line): + # return self.parse_table_format2() + elif line.find("May be used in sections") != -1: + nextline = pctxt.get_line(1) + rows = [] + headers = line.split(":") + rows.append(headers[1].split("|")) + rows.append(nextline.split("|")) + table = { + "rows": rows, + "title": headers[0] + } + pctxt.next(2) # skip this previous table + pctxt.stop = True + + return self.renderTable(table) + + 
return line + + + def parse_table_format2(self): + pctxt = self.pctxt + + linesep = pctxt.get_line() + rows = [] + + pctxt.next() + maxcols = 0 + while pctxt.get_line().strip().startswith("|"): + row = pctxt.get_line().strip()[1:-1].split("|") + rows.append(row) + maxcols = max(maxcols, len(row)) + pctxt.next() + if pctxt.get_line() == linesep: + # TODO : find a way to define a special style for next row + pctxt.next() + pctxt.stop = True + + return self.renderTable(rows, maxcols) + + # Render tables detected by the conversion parser + def renderTable(self, table, maxColumns = 0, toplevel = None): + pctxt = self.pctxt + template = pctxt.templates.get_template("parser/table.tpl") + + res = "" + + title = None + if isinstance(table, dict): + title = table["title"] + table = table["rows"] + + if not maxColumns: + maxColumns = len(table[0]) + + rows = [] + + mode = "th" + headerLine = "" + hasKeywords = False + i = 0 + for row in table: + line = "" + + if i == 0: + row_template = pctxt.templates.get_template("parser/table/header.tpl") + else: + row_template = pctxt.templates.get_template("parser/table/row.tpl") + + if i > 1 and (i - 1) % 20 == 0 and len(table) > 50: + # Repeat headers periodically for long tables + rows.append(headerLine) + + j = 0 + cols = [] + for column in row: + if j >= maxColumns: + break + + tplcol = {} + + data = column.strip() + keyword = column + if j == 0 and i == 0 and keyword == 'keyword': + hasKeywords = True + if j == 0 and i != 0 and hasKeywords: + if keyword.startswith("[no] "): + keyword = keyword[len("[no] "):] + tplcol['toplevel'] = toplevel + tplcol['keyword'] = keyword + tplcol['extra'] = [] + if j == 0 and len(row) > maxColumns: + for k in xrange(maxColumns, len(row)): + tplcol['extra'].append(row[k]) + tplcol['data'] = data + cols.append(tplcol) + j += 1 + mode = "td" + + line = row_template.render( + pctxt=pctxt, + columns=cols + ).strip() + if i == 0: + headerLine = line + + rows.append(line) + + i += 1 + + return 
template.render( + pctxt=pctxt, + title=title, + rows=rows, + ) diff --git a/debian/dconv/parser/underline.py b/debian/dconv/parser/underline.py new file mode 100644 index 0000000..3a2350c --- /dev/null +++ b/debian/dconv/parser/underline.py @@ -0,0 +1,16 @@ +import parser + +class Parser(parser.Parser): + # Detect underlines + def parse(self, line): + pctxt = self.pctxt + if pctxt.has_more_lines(1): + nextline = pctxt.get_line(1) + if (len(line) > 0) and (len(nextline) > 0) and (nextline[0] == '-') and ("-" * len(line) == nextline): + template = pctxt.templates.get_template("parser/underline.tpl") + line = template.render(pctxt=pctxt, data=line).strip() + pctxt.next(2) + pctxt.eat_empty_lines() + pctxt.stop = True + + return line diff --git a/debian/dconv/templates/parser/arguments.tpl b/debian/dconv/templates/parser/arguments.tpl new file mode 100644 index 0000000..b5f91e9 --- /dev/null +++ b/debian/dconv/templates/parser/arguments.tpl @@ -0,0 +1,9 @@ +
    +${label}\ +% if desc: + ${desc} +% endif +% if content: +
    ${"\n".join(content)}
    +% endif +
    diff --git a/debian/dconv/templates/parser/example.tpl b/debian/dconv/templates/parser/example.tpl new file mode 100644 index 0000000..184b6dd --- /dev/null +++ b/debian/dconv/templates/parser/example.tpl @@ -0,0 +1,12 @@ +
    +${label} +
    +% if desc:
    +
    ${desc}
    \ +% endif +\ +% for line in content: +${line} +% endfor +
    +
    \ No newline at end of file diff --git a/debian/dconv/templates/parser/example/comment.tpl b/debian/dconv/templates/parser/example/comment.tpl new file mode 100644 index 0000000..b51ec2d --- /dev/null +++ b/debian/dconv/templates/parser/example/comment.tpl @@ -0,0 +1 @@ +\1 \ No newline at end of file diff --git a/debian/dconv/templates/parser/seealso.tpl b/debian/dconv/templates/parser/seealso.tpl new file mode 100644 index 0000000..72cf5f9 --- /dev/null +++ b/debian/dconv/templates/parser/seealso.tpl @@ -0,0 +1 @@ + diff --git a/debian/dconv/templates/parser/table.tpl b/debian/dconv/templates/parser/table.tpl new file mode 100644 index 0000000..0119176 --- /dev/null +++ b/debian/dconv/templates/parser/table.tpl @@ -0,0 +1,11 @@ +% if title: +

    ${title} :

    \ +% endif + +% for row in rows: +${row} +% endfor +
    \ +% if title: +
    +% endif \ No newline at end of file diff --git a/debian/dconv/templates/parser/table/header.tpl b/debian/dconv/templates/parser/table/header.tpl new file mode 100644 index 0000000..e84b47f --- /dev/null +++ b/debian/dconv/templates/parser/table/header.tpl @@ -0,0 +1,6 @@ +\ +% for col in columns: +<% data = col['data'] %>\ +${data}\ +% endfor + diff --git a/debian/dconv/templates/parser/table/row.tpl b/debian/dconv/templates/parser/table/row.tpl new file mode 100644 index 0000000..658469c --- /dev/null +++ b/debian/dconv/templates/parser/table/row.tpl @@ -0,0 +1,36 @@ +<% from urllib import quote %> +<% base = pctxt.context['base'] %> +\ +% for col in columns: +<% data = col['data'] %>\ +<% + if data in ['yes']: + style = "class=\"alert-success pagination-centered\"" + data = 'yes
    yes' % base + elif data in ['no']: + style = "class=\"alert-error pagination-centered\"" + data = 'no
    no' % base + elif data in ['X']: + style = "class=\"pagination-centered\"" + data = 'X' % base + elif data in ['-']: + style = "class=\"pagination-centered\"" + data = ' ' + elif data in ['*']: + style = "class=\"pagination-centered\"" + else: + style = None +%>\ +\ +% if "keyword" in col: +\ +% for extra in col['extra']: +${extra}\ +% endfor +${data}\ +% else: +${data}\ +% endif +\ +% endfor + diff --git a/debian/dconv/templates/parser/underline.tpl b/debian/dconv/templates/parser/underline.tpl new file mode 100644 index 0000000..4f35f7e --- /dev/null +++ b/debian/dconv/templates/parser/underline.tpl @@ -0,0 +1 @@ +
    ${data}
    diff --git a/debian/dconv/templates/summary.html b/debian/dconv/templates/summary.html new file mode 100644 index 0000000..87c6414 --- /dev/null +++ b/debian/dconv/templates/summary.html @@ -0,0 +1,43 @@ + + +
    +
    + <% previousLevel = None %> + % for k in chapterIndexes: + <% chapter = chapters[k] %> + % if chapter['title']: + <% + if chapter['level'] == 1: + otag = "" + etag = "" + else: + otag = etag = "" + %> + % if chapter['chapter'] == '7': + ## Quick and dirty hack to split the summary in 2 columns + ## TODO : implement a generic way split the summary +
    + <% previousLevel = None %> + % endif + % if otag and previousLevel: +
    + % endif +
    +
    ${otag}${chapter['chapter']}.${etag}
    +
    + % for tab in range(1, chapter['level']): +
    + % endfor + ${otag}${chapter['title']}${etag} + % for tab in range(1, chapter['level']): +
    + % endfor +
    +
    + <% previousLevel = chapter['level'] %> + % endif + % endfor +
    +
    diff --git a/debian/dconv/templates/template.html b/debian/dconv/templates/template.html new file mode 100644 index 0000000..c72b355 --- /dev/null +++ b/debian/dconv/templates/template.html @@ -0,0 +1,238 @@ + + + + + ${headers['title']} ${headers['version']} - ${headers['subtitle']} + + + + + + + +
    + + + + +
    +
    +
    +
    +

    ${headers['title']}

    +

    ${headers['subtitle']}

    +

    ${headers['version']}

    +

    +
    + ${headers['author']}
    + ${headers['date']} +

    +
    + + ${document} +
    +
    +
    + ${headers['title']} ${headers['version'].replace("version ", "")} – ${headers['subtitle']}
    + ${headers['date']}, ${headers['author']} +
    +
    + +
    + +
    +
      + + +
    +
    +
    + + +
    + + + + + + + ${footer} + + + diff --git a/debian/dconv/tools/generate-docs.sh b/debian/dconv/tools/generate-docs.sh new file mode 100755 index 0000000..36fdf1b --- /dev/null +++ b/debian/dconv/tools/generate-docs.sh @@ -0,0 +1,177 @@ +#!/bin/bash + +PROJECT_HOME=$(dirname $(readlink -f $0)) +cd $PROJECT_HOME || exit 1 + +WORK_DIR=$PROJECT_HOME/work + +function on_exit() +{ + echo "-- END $(date)" +} + +function init() +{ + trap on_exit EXIT + + echo + echo "-- START $(date)" + echo "PROJECT_HOME = $PROJECT_HOME" + + echo "Preparing work directories..." + mkdir -p $WORK_DIR || exit 1 + mkdir -p $WORK_DIR/haproxy || exit 1 + mkdir -p $WORK_DIR/haproxy-dconv || exit 1 + + UPDATED=0 + PUSH=0 + +} + +# Needed as "git -C" is only available since git 1.8.5 +function git-C() +{ + _gitpath=$1 + shift + echo "git --git-dir=$_gitpath/.git --work-tree=$_gitpath $@" >&2 + git --git-dir=$_gitpath/.git --work-tree=$_gitpath "$@" +} + +function fetch_haproxy_dconv() +{ + echo "Fetching latest haproxy-dconv public version..." + if [ ! -e $WORK_DIR/haproxy-dconv/master ]; + then + git clone -v git://github.com/cbonte/haproxy-dconv.git $WORK_DIR/haproxy-dconv/master || exit 1 + fi + GIT="git-C $WORK_DIR/haproxy-dconv/master" + + OLD_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)" + $GIT checkout master && $GIT pull -v + version=$($GIT describe --tags) + version=${version%-g*} + NEW_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)" + if [ "$OLD_MD5" != "$NEW_MD5" ]; + then + UPDATED=1 + fi + + echo "Fetching last haproxy-dconv public pages version..." + if [ ! -e $WORK_DIR/haproxy-dconv/gh-pages ]; + then + cp -a $WORK_DIR/haproxy-dconv/master $WORK_DIR/haproxy-dconv/gh-pages || exit 1 + fi + GIT="git-C $WORK_DIR/haproxy-dconv/gh-pages" + + $GIT checkout gh-pages && $GIT pull -v +} + +function fetch_haproxy() +{ + url=$1 + path=$2 + + echo "Fetching HAProxy 1.4 repository..." + if [ ! 
-e $path ]; + then + git clone -v $url $path || exit 1 + fi + GIT="git-C $path" + + $GIT checkout master && $GIT pull -v +} + +function _generate_file() +{ + infile=$1 + destfile=$2 + git_version=$3 + state=$4 + + $GIT checkout $git_version + + if [ -e $gitpath/doc/$infile ]; + then + + git_version_simple=${git_version%-g*} + doc_version=$(tail -n1 $destfile 2>/dev/null | grep " git:" | sed 's/.* git:\([^ ]*\).*/\1/') + if [ $UPDATED -eq 1 -o "$git_version" != "$doc_version" ]; + then + HTAG="VERSION-$(basename $gitpath | sed 's/[.]/\\&/g')" + if [ "$state" == "snapshot" ]; + then + base=".." + HTAG="$HTAG-SNAPSHOT" + else + base="." + fi + + + $WORK_DIR/haproxy-dconv/master/haproxy-dconv.py -i $gitpath/doc/$infile -o $destfile --base=$base && + echo "" >> $destfile && + sed -i "s/\(<\!-- $HTAG -->\)\(.*\)\(<\!-- \/$HTAG -->\)/\1${git_version_simple}\3/" $docroot/index.html + + else + echo "Already up to date." + fi + + if [ "$doc_version" != "" -a "$git_version" != "$doc_version" ]; + then + changelog=$($GIT log --oneline $doc_version..$git_version $gitpath/doc/$infile) + else + changelog="" + fi + + GITDOC="git-C $docroot" + if [ "$($GITDOC status -s $destfile)" != "" ]; + then + $GITDOC add $destfile && + $GITDOC commit -m "Updating HAProxy $state $infile ${git_version_simple} generated by haproxy-dconv $version" -m "$changelog" $destfile $docroot/index.html && + PUSH=1 + fi + fi +} + +function generate_docs() +{ + url=$1 + gitpath=$2 + docroot=$3 + infile=$4 + outfile=$5 + + fetch_haproxy $url $gitpath + + GIT="git-C $gitpath" + + $GIT checkout master + git_version=$($GIT describe --tags --match 'v*') + git_version_stable=${git_version%-*-g*} + + echo "Generating snapshot version $git_version..." + _generate_file $infile $docroot/snapshot/$outfile $git_version snapshot + + echo "Generating stable version $git_version..." 
+ _generate_file $infile $docroot/$outfile $git_version_stable stable +} + +function push() +{ + docroot=$1 + GITDOC="git-C $docroot" + + if [ $PUSH -eq 1 ]; + then + $GITDOC push origin gh-pages + fi + +} + + +init +fetch_haproxy_dconv +generate_docs http://git.1wt.eu/git/haproxy-1.4.git/ $WORK_DIR/haproxy/1.4 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.4.html +generate_docs http://git.1wt.eu/git/haproxy-1.5.git/ $WORK_DIR/haproxy/1.5 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.5.html +generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.6.html +generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages intro.txt intro-1.6.html +push $WORK_DIR/haproxy-dconv/gh-pages diff --git a/debian/gbp.conf b/debian/gbp.conf new file mode 100644 index 0000000..a8e5456 --- /dev/null +++ b/debian/gbp.conf @@ -0,0 +1,4 @@ +[DEFAULT] +pristine-tar = True +upstream-branch = upstream-1.6 +debian-branch = master diff --git a/debian/halog.1 b/debian/halog.1 new file mode 100644 index 0000000..f5dd19f --- /dev/null +++ b/debian/halog.1 @@ -0,0 +1,108 @@ +.TH HALOG "1" "July 2013" "halog" "User Commands" +.SH NAME +halog \- HAProxy log statistics reporter +.SH SYNOPSIS +.B halog +[\fI-h|--help\fR] +.br +.B halog +[\fIoptions\fR] +Only match response times larger|smaller than
    + + +@@ -24,31 +24,16 @@ + +@@ -72,7 +57,7 @@ + The feature is automatically disabled when the search field is focused. +

    +

    +- Converted with haproxy-dconv v${version} on ${date} ++ Converted with haproxy-dconv +

    +
    + +@@ -83,7 +68,7 @@ +
    +

    ${headers['title']}

    +

    ${headers['subtitle']}

    +-

    ${headers['version']}

    ++

    ${headers['version']} (Debian)

    +

    +
    + ${headers['author']}
    +@@ -114,9 +99,9 @@ +

    + + +- +- +- ++ ++ ++ + + + + + ${footer} + + + diff --git a/haproxy-1.6.3/debian/dconv/tools/generate-docs.sh b/haproxy-1.6.3/debian/dconv/tools/generate-docs.sh new file mode 100755 index 0000000..36fdf1b --- /dev/null +++ b/haproxy-1.6.3/debian/dconv/tools/generate-docs.sh @@ -0,0 +1,177 @@ +#!/bin/bash + +PROJECT_HOME=$(dirname $(readlink -f $0)) +cd $PROJECT_HOME || exit 1 + +WORK_DIR=$PROJECT_HOME/work + +function on_exit() +{ + echo "-- END $(date)" +} + +function init() +{ + trap on_exit EXIT + + echo + echo "-- START $(date)" + echo "PROJECT_HOME = $PROJECT_HOME" + + echo "Preparing work directories..." + mkdir -p $WORK_DIR || exit 1 + mkdir -p $WORK_DIR/haproxy || exit 1 + mkdir -p $WORK_DIR/haproxy-dconv || exit 1 + + UPDATED=0 + PUSH=0 + +} + +# Needed as "git -C" is only available since git 1.8.5 +function git-C() +{ + _gitpath=$1 + shift + echo "git --git-dir=$_gitpath/.git --work-tree=$_gitpath $@" >&2 + git --git-dir=$_gitpath/.git --work-tree=$_gitpath "$@" +} + +function fetch_haproxy_dconv() +{ + echo "Fetching latest haproxy-dconv public version..." + if [ ! -e $WORK_DIR/haproxy-dconv/master ]; + then + git clone -v git://github.com/cbonte/haproxy-dconv.git $WORK_DIR/haproxy-dconv/master || exit 1 + fi + GIT="git-C $WORK_DIR/haproxy-dconv/master" + + OLD_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)" + $GIT checkout master && $GIT pull -v + version=$($GIT describe --tags) + version=${version%-g*} + NEW_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)" + if [ "$OLD_MD5" != "$NEW_MD5" ]; + then + UPDATED=1 + fi + + echo "Fetching last haproxy-dconv public pages version..." + if [ ! -e $WORK_DIR/haproxy-dconv/gh-pages ]; + then + cp -a $WORK_DIR/haproxy-dconv/master $WORK_DIR/haproxy-dconv/gh-pages || exit 1 + fi + GIT="git-C $WORK_DIR/haproxy-dconv/gh-pages" + + $GIT checkout gh-pages && $GIT pull -v +} + +function fetch_haproxy() +{ + url=$1 + path=$2 + + echo "Fetching HAProxy 1.4 repository..." + if [ ! 
-e $path ]; + then + git clone -v $url $path || exit 1 + fi + GIT="git-C $path" + + $GIT checkout master && $GIT pull -v +} + +function _generate_file() +{ + infile=$1 + destfile=$2 + git_version=$3 + state=$4 + + $GIT checkout $git_version + + if [ -e $gitpath/doc/$infile ]; + then + + git_version_simple=${git_version%-g*} + doc_version=$(tail -n1 $destfile 2>/dev/null | grep " git:" | sed 's/.* git:\([^ ]*\).*/\1/') + if [ $UPDATED -eq 1 -o "$git_version" != "$doc_version" ]; + then + HTAG="VERSION-$(basename $gitpath | sed 's/[.]/\\&/g')" + if [ "$state" == "snapshot" ]; + then + base=".." + HTAG="$HTAG-SNAPSHOT" + else + base="." + fi + + + $WORK_DIR/haproxy-dconv/master/haproxy-dconv.py -i $gitpath/doc/$infile -o $destfile --base=$base && + echo "" >> $destfile && + sed -i "s/\(<\!-- $HTAG -->\)\(.*\)\(<\!-- \/$HTAG -->\)/\1${git_version_simple}\3/" $docroot/index.html + + else + echo "Already up to date." + fi + + if [ "$doc_version" != "" -a "$git_version" != "$doc_version" ]; + then + changelog=$($GIT log --oneline $doc_version..$git_version $gitpath/doc/$infile) + else + changelog="" + fi + + GITDOC="git-C $docroot" + if [ "$($GITDOC status -s $destfile)" != "" ]; + then + $GITDOC add $destfile && + $GITDOC commit -m "Updating HAProxy $state $infile ${git_version_simple} generated by haproxy-dconv $version" -m "$changelog" $destfile $docroot/index.html && + PUSH=1 + fi + fi +} + +function generate_docs() +{ + url=$1 + gitpath=$2 + docroot=$3 + infile=$4 + outfile=$5 + + fetch_haproxy $url $gitpath + + GIT="git-C $gitpath" + + $GIT checkout master + git_version=$($GIT describe --tags --match 'v*') + git_version_stable=${git_version%-*-g*} + + echo "Generating snapshot version $git_version..." + _generate_file $infile $docroot/snapshot/$outfile $git_version snapshot + + echo "Generating stable version $git_version..." 
+ _generate_file $infile $docroot/$outfile $git_version_stable stable +} + +function push() +{ + docroot=$1 + GITDOC="git-C $docroot" + + if [ $PUSH -eq 1 ]; + then + $GITDOC push origin gh-pages + fi + +} + + +init +fetch_haproxy_dconv +generate_docs http://git.1wt.eu/git/haproxy-1.4.git/ $WORK_DIR/haproxy/1.4 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.4.html +generate_docs http://git.1wt.eu/git/haproxy-1.5.git/ $WORK_DIR/haproxy/1.5 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.5.html +generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.6.html +generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages intro.txt intro-1.6.html +push $WORK_DIR/haproxy-dconv/gh-pages diff --git a/haproxy-1.6.3/debian/gbp.conf b/haproxy-1.6.3/debian/gbp.conf new file mode 100644 index 0000000..a8e5456 --- /dev/null +++ b/haproxy-1.6.3/debian/gbp.conf @@ -0,0 +1,4 @@ +[DEFAULT] +pristine-tar = True +upstream-branch = upstream-1.6 +debian-branch = master diff --git a/haproxy-1.6.3/debian/halog.1 b/haproxy-1.6.3/debian/halog.1 new file mode 100644 index 0000000..f5dd19f --- /dev/null +++ b/haproxy-1.6.3/debian/halog.1 @@ -0,0 +1,108 @@ +.TH HALOG "1" "July 2013" "halog" "User Commands" +.SH NAME +halog \- HAProxy log statistics reporter +.SH SYNOPSIS +.B halog +[\fI-h|--help\fR] +.br +.B halog +[\fIoptions\fR] +Only match response times larger|smaller than
    + + +@@ -24,31 +24,16 @@ + +@@ -72,7 +57,7 @@ + The feature is automatically disabled when the search field is focused. +

    +

    +- Converted with haproxy-dconv v${version} on ${date} ++ Converted with haproxy-dconv +

    +
    + +@@ -83,7 +68,7 @@ +
    +

    ${headers['title']}

    +

    ${headers['subtitle']}

    +-

    ${headers['version']}

    ++

    ${headers['version']} (Debian)

    +

    +
    + ${headers['author']}
    +@@ -114,9 +99,9 @@ +

    + + +- +- +- ++ ++ ++ +