+haproxy (1.6.3-1~u16.04+mos2) mos10.0; urgency=medium
+
+ * SECURITY UPDATE: denial of service via reqdeny
+ - debian/patches/CVE-2016-5360.patch: use a temporary variable to store
+ status in include/types/proto_http.h, src/proto_http.c.
+ - CVE-2016-5360
+
+ -- Sergii Golovatiuk <sgolovatiuk@mirantis.com> Tue, 14 Jun 2016 09:35:08 +0300
+
haproxy (1.6.3-1~u16.04+mos1) mos10.0; urgency=medium
* Add MIRA0001-Adding-include-configuration-statement-to-haproxy.patch
--- /dev/null
+From: Willy Tarreau <w@1wt.eu>
+Date: Wed, 25 May 2016 14:23:59 +0000 (+0200)
+Subject: BUG/MAJOR: http: fix breakage of "reqdeny" causing random crashes
+X-Git-Url: http://git.haproxy.org/?p=haproxy-1.6.git;a=commitdiff_plain;h=60f01f8c89e4fb2723d5a9f2046286e699567e0b
+
+BUG/MAJOR: http: fix breakage of "reqdeny" causing random crashes
+
+Commit 108b1dd ("MEDIUM: http: configurable http result codes for
+http-request deny") introduced in 1.6-dev2 was incomplete. It introduced
+a new field "rule_deny_status" into struct http_txn, which is filled only
+by actions "http-request deny" and "http-request tarpit". It's then used
+in the deny code path to emit the proper error message, but is used
+uninitialized when the deny comes from a "reqdeny" rule, causing random
+behaviours ranging from returning a 200 or an empty response to crashing
+the process. Often upon startup only a 200 was returned, but after the fields
+are used the crash happens. This can be sped up using -dM.
+
+There's no need at all for storing this status in the http_txn struct
+anyway since it's used immediately after being set. Let's store it in
+a temporary variable instead which is passed as an argument to function
+http_req_get_intercept_rule().
+
+As an extra benefit, removing it from struct http_txn reduced the size
+of this struct by 8 bytes.
+
+This fix must be backported to 1.6 where the bug was detected. Special
+thanks to Falco Schmutz for his detailed report including an exploitable
+core and a reproducer.
+(cherry picked from commit 58727ec088e55f739b146cff3baa955f8d1b2a3e)
+---
+
+Index: haproxy-1.6.3/include/types/proto_http.h
+===================================================================
+--- haproxy-1.6.3.orig/include/types/proto_http.h 2016-06-14 09:34:29.456987585 +0300
++++ haproxy-1.6.3/include/types/proto_http.h 2016-06-14 09:34:29.448987472 +0300
+@@ -362,7 +362,6 @@
+ unsigned int flags; /* transaction flags */
+ enum http_meth_t meth; /* HTTP method */
+ /* 1 unused byte here */
+- short rule_deny_status; /* HTTP status from rule when denying */
+ short status; /* HTTP status from the server, negative if from proxy */
+
+ char *uri; /* first line if log needed, NULL otherwise */
+Index: haproxy-1.6.3/src/proto_http.c
+===================================================================
+--- haproxy-1.6.3.orig/src/proto_http.c 2016-06-14 09:34:29.456987585 +0300
++++ haproxy-1.6.3/src/proto_http.c 2016-06-14 09:34:29.452987529 +0300
+@@ -3489,10 +3489,12 @@
+ * further processing of the request (auth, deny, ...), and defaults to
+ * HTTP_RULE_RES_STOP if it executed all rules or stopped on an allow, or
+ * HTTP_RULE_RES_CONT if the last rule was reached. It may set the TX_CLTARPIT
+- * on txn->flags if it encounters a tarpit rule.
++ * on txn->flags if it encounters a tarpit rule. If <deny_status> is not NULL
++ * and a deny/tarpit rule is matched, it will be filled with this rule's deny
++ * status.
+ */
+ enum rule_result
+-http_req_get_intercept_rule(struct proxy *px, struct list *rules, struct stream *s)
++http_req_get_intercept_rule(struct proxy *px, struct list *rules, struct stream *s, int *deny_status)
+ {
+ struct session *sess = strm_sess(s);
+ struct http_txn *txn = s->txn;
+@@ -3538,12 +3540,14 @@
+ return HTTP_RULE_RES_STOP;
+
+ case ACT_ACTION_DENY:
+- txn->rule_deny_status = rule->deny_status;
++ if (deny_status)
++ *deny_status = rule->deny_status;
+ return HTTP_RULE_RES_DENY;
+
+ case ACT_HTTP_REQ_TARPIT:
+ txn->flags |= TX_CLTARPIT;
+- txn->rule_deny_status = rule->deny_status;
++ if (deny_status)
++ *deny_status = rule->deny_status;
+ return HTTP_RULE_RES_DENY;
+
+ case ACT_HTTP_REQ_AUTH:
+@@ -4302,6 +4306,7 @@
+ struct redirect_rule *rule;
+ struct cond_wordlist *wl;
+ enum rule_result verdict;
++ int deny_status = HTTP_ERR_403;
+
+ if (unlikely(msg->msg_state < HTTP_MSG_BODY)) {
+ /* we need more data */
+@@ -4322,7 +4327,7 @@
+
+ /* evaluate http-request rules */
+ if (!LIST_ISEMPTY(&px->http_req_rules)) {
+- verdict = http_req_get_intercept_rule(px, &px->http_req_rules, s);
++ verdict = http_req_get_intercept_rule(px, &px->http_req_rules, s, &deny_status);
+
+ switch (verdict) {
+ case HTTP_RULE_RES_YIELD: /* some data miss, call the function later. */
+@@ -4368,7 +4373,7 @@
+
+ /* parse the whole stats request and extract the relevant information */
+ http_handle_stats(s, req);
+- verdict = http_req_get_intercept_rule(px, &px->uri_auth->http_req_rules, s);
++ verdict = http_req_get_intercept_rule(px, &px->uri_auth->http_req_rules, s, &deny_status);
+ /* not all actions implemented: deny, allow, auth */
+
+ if (verdict == HTTP_RULE_RES_DENY) /* stats http-request deny */
+@@ -4487,9 +4492,9 @@
+
+ deny: /* this request was blocked (denied) */
+ txn->flags |= TX_CLDENY;
+- txn->status = http_err_codes[txn->rule_deny_status];
++ txn->status = http_err_codes[deny_status];
+ s->logs.tv_request = now;
+- stream_int_retnclose(&s->si[0], http_error_message(s, txn->rule_deny_status));
++ stream_int_retnclose(&s->si[0], http_error_message(s, deny_status));
+ stream_inc_http_err_ctr(s);
+ sess->fe->fe_counters.denied_req++;
+ if (sess->fe != s->be)
haproxy.service-add-documentation.patch
haproxy.service-check-config-before-reload.patch
haproxy.service-use-environment-variables.patch
+CVE-2016-5360.patch
MIRA0001-Adding-include-configuration-statement-to-haproxy.patch
+++ /dev/null
-haproxy (1.4.23-1) unstable; urgency=low
-
- As of 1.4.23-1, the Debian package ships an rsyslog snippet to allow logging
- via /dev/log from chrooted HAProxy processes. If you are using rsyslog, you
- should restart rsyslog after installing this package to enable HAProxy to log
- via rsyslog. See /usr/share/doc/haproxy/README.Debian for more details.
-
- Also note that as of 1.4.23-1, chrooting the HAProxy process is enabled in the
- default Debian configuration.
-
- -- Apollon Oikonomopoulos <apoikos@gmail.com> Thu, 25 Apr 2013 23:26:35 +0300
-
-haproxy (1.4.13-1) unstable; urgency=low
-
- Maintainer of this package has changed.
-
- -- Christo Buschek <crito@30loops.net> Mon, 10 Mar 2011 22:07:10 +0100
-
-haproxy (1.3.14.2-1) unstable; urgency=low
-
- Configuration has moved to /etc/haproxy/haproxy.cfg. This allows to add the
- configurable /etc/haproxy/errors directory.
- The haproxy binary was also moved to /usr/sbin rather than /usr/bin, update
- your init script or reinstall the one provided with the package.
-
- -- Arnaud Cornet <acornet@debian.org> Mon, 21 Jan 2008 23:38:15 +0100
+++ /dev/null
-Binding non-local IPv6 addresses
-================================
-
-There are cases where HAProxy needs to bind() a non-existing address, like
-for example in high-availability setups with floating IP addresses (e.g. using
-keepalived or ucarp). For IPv4 the net.ipv4.ip_nonlocal_bind sysctl can be used
-to permit binding non-existing addresses, such a control does not exist for
-IPv6 however.
-
-The solution is to add the "transparent" parameter to the frontend's bind
-statement, for example:
-
-frontend fe1
- bind 2001:db8:abcd:f00::1:8080 transparent
-
-This will require a recent Linux kernel (>= 2.6.28) with TPROXY support (Debian
-kernels will work correctly with this option).
-
-See /usr/share/doc/haproxy/configuration.txt.gz for more information on the
-"transparent" bind parameter.
-
- -- Apollon Oikonomopoulos <apoikos@gmail.com> Wed, 16 Oct 2013 21:18:58 +0300
+++ /dev/null
-haproxy (1.6.3-1) unstable; urgency=medium
-
- [ Apollon Oikonomopoulos ]
- * haproxy.init: use s-s-d's --pidfile option.
- Thanks to Louis Bouchard (Closes: 804530)
-
- [ Vincent Bernat ]
- * watch: fix d/watch to look for 1.6 version
- * Imported Upstream version 1.6.3
-
- -- Vincent Bernat <bernat@debian.org> Thu, 31 Dec 2015 08:10:10 +0100
-
-haproxy (1.6.2-2) unstable; urgency=medium
-
- * Enable USE_REGPARM on amd64 as well.
-
- -- Vincent Bernat <bernat@debian.org> Tue, 03 Nov 2015 21:21:30 +0100
-
-haproxy (1.6.2-1) unstable; urgency=medium
-
- * New upstream release.
- - BUG/MAJOR: dns: first DNS response packet not matching queried
- hostname may lead to a loop
- - BUG/MAJOR: http: don't requeue an idle connection that is already
- queued
- * Upload to unstable.
-
- -- Vincent Bernat <bernat@debian.org> Tue, 03 Nov 2015 13:36:22 +0100
-
-haproxy (1.6.1-2) experimental; urgency=medium
-
- * Build the Lua manpage in -arch, fixes FTBFS in binary-only builds.
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Thu, 22 Oct 2015 12:19:41 +0300
-
-haproxy (1.6.1-1) experimental; urgency=medium
-
- [ Vincent Bernat ]
- * New upstream release.
- - BUG/MAJOR: ssl: free the generated SSL_CTX if the LRU cache is
- disabled
- * Drop 0001-BUILD-install-only-relevant-and-existing-documentati.patch.
-
- [ Apollon Oikonomopoulos ]
- * Ship and generate Lua API documentation.
-
- -- Vincent Bernat <bernat@debian.org> Thu, 22 Oct 2015 10:45:55 +0200
-
-haproxy (1.6.0+ds1-1) experimental; urgency=medium
-
- * New upstream release!
- * Add a patch to fix documentation installation:
- + 0001-BUILD-install-only-relevant-and-existing-documentati.patch
- * Update HAProxy documentation converter to a more recent version.
-
- -- Vincent Bernat <bernat@debian.org> Wed, 14 Oct 2015 17:29:19 +0200
-
-haproxy (1.6~dev7-1) experimental; urgency=medium
-
- * New upstream release.
-
- -- Vincent Bernat <bernat@debian.org> Tue, 06 Oct 2015 16:01:26 +0200
-
-haproxy (1.6~dev5-1) experimental; urgency=medium
-
- * New upstream release.
-
- -- Vincent Bernat <bernat@debian.org> Mon, 14 Sep 2015 15:50:28 +0200
-
-haproxy (1.6~dev4-1) experimental; urgency=medium
-
- * New upstream release.
- * Refresh debian/copyright.
-
- -- Vincent Bernat <bernat@debian.org> Sun, 30 Aug 2015 23:54:10 +0200
-
-haproxy (1.6~dev3-1) experimental; urgency=medium
-
- * New upstream release.
- * Enable Lua support.
-
- -- Vincent Bernat <bernat@debian.org> Sat, 15 Aug 2015 17:51:29 +0200
-
-haproxy (1.5.15-1) unstable; urgency=medium
-
- * New upstream stable release including the following fix:
- - BUG/MAJOR: http: don't call http_send_name_header() after an error
-
- -- Vincent Bernat <bernat@debian.org> Mon, 02 Nov 2015 07:34:19 +0100
-
-haproxy (1.5.14-1) unstable; urgency=high
-
- * New upstream version. Fix an information leak (CVE-2015-3281):
- - BUG/MAJOR: buffers: make the buffer_slow_realign() function
- respect output data.
- * Add $named as a dependency for init script. Closes: #790638.
-
- -- Vincent Bernat <bernat@debian.org> Fri, 03 Jul 2015 19:49:02 +0200
-
-haproxy (1.5.13-1) unstable; urgency=medium
-
- * New upstream stable release including the following fixes:
- - MAJOR: peers: allow peers section to be used with nbproc > 1
- - BUG/MAJOR: checks: always check for end of list before proceeding
- - MEDIUM: ssl: replace standards DH groups with custom ones
- - BUG/MEDIUM: ssl: fix tune.ssl.default-dh-param value being overwritten
- - BUG/MEDIUM: cfgparse: segfault when userlist is misused
- - BUG/MEDIUM: stats: properly initialize the scope before dumping stats
- - BUG/MEDIUM: http: don't forward client shutdown without NOLINGER
- except for tunnels
- - BUG/MEDIUM: checks: do not dereference head of a tcp-check at the end
- - BUG/MEDIUM: checks: do not dereference a list as a tcpcheck struct
- - BUG/MEDIUM: peers: apply a random reconnection timeout
- - BUG/MEDIUM: config: properly compute the default number of processes
- for a proxy
-
- -- Vincent Bernat <bernat@debian.org> Sat, 27 Jun 2015 20:52:07 +0200
-
-haproxy (1.5.12-1) unstable; urgency=medium
-
- * New upstream stable release including the following fixes:
- - BUG/MAJOR: http: don't read past buffer's end in http_replace_value
- - BUG/MAJOR: http: prevent risk of reading past end with balance
- url_param
- - BUG/MEDIUM: Do not consider an agent check as failed on L7 error
- - BUG/MEDIUM: patern: some entries are not deleted with case
- insensitive match
- - BUG/MEDIUM: buffer: one byte miss in buffer free space check
- - BUG/MEDIUM: http: thefunction "(req|res)-replace-value" doesn't
- respect the HTTP syntax
- - BUG/MEDIUM: peers: correctly configure the client timeout
- - BUG/MEDIUM: http: hdr_cnt would not count any header when called
- without name
- - BUG/MEDIUM: listener: don't report an error when resuming unbound
- listeners
- - BUG/MEDIUM: init: don't limit cpu-map to the first 32 processes only
- - BUG/MEDIUM: stream-int: always reset si->ops when si->end is
- nullified
- - BUG/MEDIUM: http: remove content-length from chunked messages
- - BUG/MEDIUM: http: do not restrict parsing of transfer-encoding to
- HTTP/1.1
- - BUG/MEDIUM: http: incorrect transfer-coding in the request is a bad
- request
- - BUG/MEDIUM: http: remove content-length form responses with bad
- transfer-encoding
- - BUG/MEDIUM: http: wait for the exact amount of body bytes in
- wait_for_request_body
-
- -- Vincent Bernat <bernat@debian.org> Sat, 02 May 2015 16:38:28 +0200
-
-haproxy (1.5.11-2) unstable; urgency=medium
-
- * Upload to unstable.
-
- -- Vincent Bernat <bernat@debian.org> Sun, 26 Apr 2015 17:46:58 +0200
-
-haproxy (1.5.11-1) experimental; urgency=medium
-
- * New upstream stable release including the following fixes:
- - BUG/MAJOR: log: don't try to emit a log if no logger is set
- - BUG/MEDIUM: backend: correctly detect the domain when
- use_domain_only is used
- - BUG/MEDIUM: Do not set agent health to zero if server is disabled
- in config
- - BUG/MEDIUM: Only explicitly report "DOWN (agent)" if the agent health
- is zero
- - BUG/MEDIUM: http: fix header removal when previous header ends with
- pure LF
- - BUG/MEDIUM: channel: fix possible integer overflow on reserved size
- computation
- - BUG/MEDIUM: channel: don't schedule data in transit for leaving until
- connected
- - BUG/MEDIUM: http: make http-request set-header compute the string
- before removal
- * Upload to experimental.
-
- -- Vincent Bernat <bernat@debian.org> Sun, 01 Feb 2015 09:22:27 +0100
-
-haproxy (1.5.10-1) experimental; urgency=medium
-
- * New upstream stable release including the following fixes:
- - BUG/MAJOR: stream-int: properly check the memory allocation return
- - BUG/MEDIUM: sample: fix random number upper-bound
- - BUG/MEDIUM: patterns: previous fix was incomplete
- - BUG/MEDIUM: payload: ensure that a request channel is available
- - BUG/MEDIUM: tcp-check: don't rely on random memory contents
- - BUG/MEDIUM: tcp-checks: disable quick-ack unless next rule is an expect
- - BUG/MEDIUM: config: do not propagate processes between stopped
- processes
- - BUG/MEDIUM: memory: fix freeing logic in pool_gc2()
- - BUG/MEDIUM: compression: correctly report zlib_mem
- * Upload to experimental.
-
- -- Vincent Bernat <bernat@debian.org> Sun, 04 Jan 2015 13:17:56 +0100
-
-haproxy (1.5.9-1) experimental; urgency=medium
-
- * New upstream stable release including the following fixes:
- - BUG/MAJOR: sessions: unlink session from list on out
- of memory
- - BUG/MEDIUM: pattern: don't load more than once a pattern
- list.
- - BUG/MEDIUM: connection: sanitize PPv2 header length before
- parsing address information
- - BUG/MAJOR: frontend: initialize capture pointers earlier
- - BUG/MEDIUM: checks: fix conflicts between agent checks and
- ssl healthchecks
- - BUG/MEDIUM: ssl: force a full GC in case of memory shortage
- - BUG/MEDIUM: ssl: fix bad ssl context init can cause
- segfault in case of OOM.
- * Upload to experimental.
-
- -- Vincent Bernat <bernat@debian.org> Sun, 07 Dec 2014 16:37:36 +0100
-
-haproxy (1.5.8-3) unstable; urgency=medium
-
- * Remove RC4 from the default cipher string shipped in configuration.
-
- -- Vincent Bernat <bernat@debian.org> Fri, 27 Feb 2015 11:29:23 +0100
-
-haproxy (1.5.8-2) unstable; urgency=medium
-
- * Cherry-pick the following patches from 1.5.9 release:
- - 8a0b93bde77e BUG/MAJOR: sessions: unlink session from list on out
- of memory
- - bae03eaad40a BUG/MEDIUM: pattern: don't load more than once a pattern
- list.
- - 93637b6e8503 BUG/MEDIUM: connection: sanitize PPv2 header length before
- parsing address information
- - 8ba50128832b BUG/MAJOR: frontend: initialize capture pointers earlier
- - 1f96a87c4e14 BUG/MEDIUM: checks: fix conflicts between agent checks and
- ssl healthchecks
- - 9bcc01ae2598 BUG/MEDIUM: ssl: force a full GC in case of memory shortage
- - 909514970089 BUG/MEDIUM: ssl: fix bad ssl context init can cause
- segfault in case of OOM.
- * Cherry-pick the following patches from future 1.5.10 release:
- - 1e89acb6be9b BUG/MEDIUM: payload: ensure that a request channel is
- available
- - bad3c6f1b6d7 BUG/MEDIUM: patterns: previous fix was incomplete
-
- -- Vincent Bernat <bernat@debian.org> Sun, 07 Dec 2014 11:11:21 +0100
-
-haproxy (1.5.8-1) unstable; urgency=medium
-
- * New upstream stable release including the following fixes:
-
- + BUG/MAJOR: buffer: check the space left is enough or not when input
- data in a buffer is wrapped
- + BUG/MINOR: ssl: correctly initialize ssl ctx for invalid certificates
- + BUG/MEDIUM: tcp: don't use SO_ORIGINAL_DST on non-AF_INET sockets
- + BUG/MEDIUM: regex: fix pcre_study error handling
- + BUG/MEDIUM: tcp: fix outgoing polling based on proxy protocol
- + BUG/MINOR: log: fix request flags when keep-alive is enabled
- + BUG/MAJOR: cli: explicitly call cli_release_handler() upon error
- + BUG/MEDIUM: http: don't dump debug headers on MSG_ERROR
- * Also includes the following new features:
- + MINOR: ssl: add statement to force some ssl options in global.
- + MINOR: ssl: add fetchs 'ssl_c_der' and 'ssl_f_der' to return DER
- formatted certs
- * Disable SSLv3 in the default configuration file.
-
- -- Vincent Bernat <bernat@debian.org> Fri, 31 Oct 2014 13:48:19 +0100
-
-haproxy (1.5.6-1) unstable; urgency=medium
-
- * New upstream stable release including the following fixes:
- + BUG/MEDIUM: systemd: set KillMode to 'mixed'
- + MINOR: systemd: Check configuration before start
- + BUG/MEDIUM: config: avoid skipping disabled proxies
- + BUG/MINOR: config: do not accept more track-sc than configured
- + BUG/MEDIUM: backend: fix URI hash when a query string is present
- * Drop systemd patches:
- + haproxy.service-also-check-on-start.patch
- + haproxy.service-set-killmode-to-mixed.patch
- * Refresh other patches.
-
- -- Vincent Bernat <bernat@debian.org> Mon, 20 Oct 2014 18:10:21 +0200
-
-haproxy (1.5.5-1) unstable; urgency=medium
-
- [ Vincent Bernat ]
- * initscript: use start-stop-daemon to reliably terminate all haproxy
- processes. Also treat stopping a non-running haproxy as success.
- (Closes: #762608, LP: #1038139)
-
- [ Apollon Oikonomopoulos ]
- * New upstream stable release including the following fixes:
- + DOC: Address issue where documentation is excluded due to a gitignore
- rule.
- + MEDIUM: Improve signal handling in systemd wrapper.
- + BUG/MINOR: config: don't propagate process binding for dynamic
- use_backend
- + MINOR: Also accept SIGHUP/SIGTERM in systemd-wrapper
- + DOC: clearly state that the "show sess" output format is not fixed
- + MINOR: stats: fix minor typo fix in stats_dump_errors_to_buffer()
- + DOC: indicate in the doc that track-sc* can wait if data are missing
- + MEDIUM: http: enable header manipulation for 101 responses
- + BUG/MEDIUM: config: propagate frontend to backend process binding again.
- + MEDIUM: config: properly propagate process binding between proxies
- + MEDIUM: config: make the frontends automatically bind to the listeners'
- processes
- + MEDIUM: config: compute the exact bind-process before listener's
- maxaccept
- + MEDIUM: config: only warn if stats are attached to multi-process bind
- directives
- + MEDIUM: config: report it when tcp-request rules are misplaced
- + MINOR: config: detect the case where a tcp-request content rule has no
- inspect-delay
- + MEDIUM: systemd-wrapper: support multiple executable versions and names
- + BUG/MEDIUM: remove debugging code from systemd-wrapper
- + BUG/MEDIUM: http: adjust close mode when switching to backend
- + BUG/MINOR: config: don't propagate process binding on fatal errors.
- + BUG/MEDIUM: check: rule-less tcp-check must detect connect failures
- + BUG/MINOR: tcp-check: report the correct failed step in the status
- + DOC: indicate that weight zero is reported as DRAIN
- * Add a new patch (haproxy.service-set-killmode-to-mixed.patch) to fix the
- systemctl stop action conflicting with the systemd wrapper now catching
- SIGTERM.
- * Bump standards to 3.9.6; no changes needed.
- * haproxy-doc: link to tracker.debian.org instead of packages.qa.debian.org.
- * d/copyright: move debian/dconv/* paragraph after debian/*, so that it
- actually matches the files it is supposed to.
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Wed, 08 Oct 2014 12:34:53 +0300
-
-haproxy (1.5.4-1) unstable; urgency=high
-
- * New upstream version.
- + Fix a critical bug that, under certain unlikely conditions, allows a
- client to crash haproxy.
- * Prefix rsyslog configuration file to ensure to log only to
- /var/log/haproxy. Thanks to Paul Bourke for the patch.
-
- -- Vincent Bernat <bernat@debian.org> Tue, 02 Sep 2014 19:14:38 +0200
-
-haproxy (1.5.3-1) unstable; urgency=medium
-
- * New upstream stable release, fixing the following issues:
- + Memory corruption when building a proxy protocol v2 header
- + Memory leak in SSL DHE key exchange
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Fri, 25 Jul 2014 10:41:36 +0300
-
-haproxy (1.5.2-1) unstable; urgency=medium
-
- * New upstream stable release. Important fixes:
- + A few sample fetch functions when combined in certain ways would return
- malformed results, possibly crashing the HAProxy process.
- + Hash-based load balancing and http-send-name-header would fail for
- requests which contain a body which starts to be forwarded before the
- data is used.
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Mon, 14 Jul 2014 00:42:32 +0300
-
-haproxy (1.5.1-1) unstable; urgency=medium
-
- * New upstream stable release:
- + Fix a file descriptor leak for clients that disappear before connecting.
- + Do not staple expired OCSP responses.
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Tue, 24 Jun 2014 12:56:30 +0300
-
-haproxy (1.5.0-1) unstable; urgency=medium
-
- * New upstream stable series. Notable changes since the 1.4 series:
- + Native SSL support on both sides with SNI/NPN/ALPN and OCSP stapling.
- + IPv6 and UNIX sockets are supported everywhere
- + End-to-end HTTP keep-alive for better support of NTLM and improved
- efficiency in static farms
- + HTTP/1.1 response compression (deflate, gzip) to save bandwidth
- + PROXY protocol versions 1 and 2 on both sides
- + Data sampling on everything in request or response, including payload
- + ACLs can use any matching method with any input sample
- + Maps and dynamic ACLs updatable from the CLI
- + Stick-tables support counters to track activity on any input sample
- + Custom format for logs, unique-id, header rewriting, and redirects
- + Improved health checks (SSL, scripted TCP, check agent, ...)
- + Much more scalable configuration supports hundreds of thousands of
- backends and certificates without sweating
-
- * Upload to unstable, merge all 1.5 work from experimental. Most important
- packaging changes since 1.4.25-1 include:
- + systemd support.
- + A more sane default config file.
- + Zero-downtime upgrades between 1.5 releases by gracefully reloading
- HAProxy during upgrades.
- + HTML documentation shipped in the haproxy-doc package.
- + kqueue support for kfreebsd.
-
- * Packaging changes since 1.5~dev26-2:
- + Drop patches merged upstream:
- o Fix-reference-location-in-manpage.patch
- o 0001-BUILD-stats-workaround-stupid-and-bogus-Werror-forma.patch
- + d/watch: look for stable 1.5 releases
- + systemd: respect CONFIG and EXTRAOPTS when specified in
- /etc/default/haproxy.
- + initscript: test the configuration before start or reload.
- + initscript: remove the ENABLED flag and logic.
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Fri, 20 Jun 2014 11:05:17 +0300
-
-haproxy (1.5~dev26-2) experimental; urgency=medium
-
- * initscript: start should not fail when haproxy is already running
- + Fixes upgrades from post-1.5~dev24-1 installations
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Wed, 04 Jun 2014 13:20:39 +0300
-
-haproxy (1.5~dev26-1) experimental; urgency=medium
-
- * New upstream development version.
- + Add a patch to fix compilation with -Werror=format-security
-
- -- Vincent Bernat <bernat@debian.org> Wed, 28 May 2014 20:32:10 +0200
-
-haproxy (1.5~dev25-1) experimental; urgency=medium
-
- [ Vincent Bernat ]
- * New upstream development version.
- * Rename "contimeout", "clitimeout" and "srvtimeout" in the default
- configuration file to "timeout connection", "timeout client" and
- "timeout server".
-
- [ Apollon Oikonomopoulos ]
- * Build on kfreebsd using the "freebsd" target; enables kqueue support.
-
- -- Vincent Bernat <bernat@debian.org> Thu, 15 May 2014 00:20:11 +0200
-
-haproxy (1.5~dev24-2) experimental; urgency=medium
-
- * New binary package: haproxy-doc
- + Contains the HTML documentation built using a version of Cyril Bonté's
- haproxy-dconv (https://github.com/cbonte/haproxy-dconv).
- + Add Build-Depends-Indep on python and python-mako
- + haproxy Suggests: haproxy-doc
- * systemd: check config file for validity on reload.
- * haproxy.cfg:
- + Enable the stats socket by default and bind it to
- /run/haproxy/admin.sock, which is accessible by the haproxy group.
- /run/haproxy creation is handled by the initscript for sysv-rc and a
- tmpfiles.d config for systemd.
- + Set the default locations for CA and server certificates to
- /etc/ssl/certs and /etc/ssl/private respectively.
- + Set the default cipher list to be used on listening SSL sockets to
- enable PFS, preferring ECDHE ciphers by default.
- * Gracefully reload HAProxy on upgrade instead of performing a full restart.
- * debian/rules: split build into binary-arch and binary-indep.
- * Build-depend on debhelper >= 9, set compat to 9.
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Sun, 27 Apr 2014 13:37:17 +0300
-
-haproxy (1.5~dev24-1) experimental; urgency=medium
-
- * New upstream development version, fixes major regressions introduced in
- 1.5~dev23:
-
- + Forwarding of a message body (request or response) would automatically
- stop after the transfer timeout strikes, and with no error.
- + Redirects failed to update the msg->next offset after consuming the
- request, so if they were made with keep-alive enabled and starting with
- a slash (relative location), then the buffer was shifted by a negative
- amount of data, causing a crash.
- + The code to standardize DH parameters caused an important performance
- regression for, so it was temporarily reverted for the time needed to
- understand the cause and to fix it.
-
- For a complete release announcement, including other bugfixes and feature
- enhancements, see http://deb.li/yBVA.
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Sun, 27 Apr 2014 11:09:37 +0300
-
-haproxy (1.5~dev23-1) experimental; urgency=medium
-
- * New upstream development version; notable changes since 1.5~dev22:
- + SSL record size optimizations to speed up both, small and large
- transfers.
- + Dynamic backend name support in use_backend.
- + Compressed chunked transfer encoding support.
- + Dynamic ACL manipulation via the CLI.
- + New "language" converter for extracting language preferences from
- Accept-Language headers.
- * Remove halog source and systemd unit files from
- /usr/share/doc/haproxy/contrib, they are built and shipped in their
- appropriate locations since 1.5~dev19-2.
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Wed, 23 Apr 2014 11:12:34 +0300
-
-haproxy (1.5~dev22-1) experimental; urgency=medium
-
- * New upstream development version
- * watch: use the source page and not the main one
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Mon, 03 Feb 2014 17:45:51 +0200
-
-haproxy (1.5~dev21+20140118-1) experimental; urgency=medium
-
- * New upstream development snapshot, with the following fixes since
- 1.5-dev21:
- + 00b0fb9 BUG/MAJOR: ssl: fix breakage caused by recent fix abf08d9
- + 410f810 BUG/MEDIUM: map: segmentation fault with the stats's socket
- command "set map ..."
- + abf08d9 BUG/MAJOR: connection: fix mismatch between rcv_buf's API and
- usage
- + 35249cb BUG/MINOR: pattern: pattern comparison executed twice
- + c920096 BUG/MINOR: http: don't clear the SI_FL_DONT_WAKE flag between
- requests
- + b800623 BUG/MEDIUM: stats: fix HTTP/1.0 breakage introduced in previous
- patch
- + 61f7f0a BUG/MINOR: stream-int: do not clear the owner upon unregister
- + 983eb31 BUG/MINOR: channel: CHN_INFINITE_FORWARD must be unsigned
- + a3ae932 BUG/MEDIUM: stats: the web interface must check the tracked
- servers before enabling
- + e24d963 BUG/MEDIUM: checks: unchecked servers could not be enabled
- anymore
- + 7257550 BUG/MINOR: http: always disable compression on HTTP/1.0
- + 9f708ab BUG/MINOR: checks: successful check completion must not
- re-enable MAINT servers
- + ff605db BUG/MEDIUM: backend: do not re-initialize the connection's
- context upon reuse
- + ea90063 BUG/MEDIUM: stream-int: fix the keep-alive idle connection
- handler
- * Update debian/copyright to reflect the license of ebtree/
- (closes: #732614)
- * Synchronize debian/copyright with source
- * Add Documentation field to the systemd unit file
-
- -- Apollon Oikonomopoulos <apoikos@debian.org> Mon, 20 Jan 2014 10:07:34 +0200
-
-haproxy (1.5~dev21-1) experimental; urgency=low
-
- [ Prach Pongpanich ]
- * Bump Standards-Version to 3.9.5
-
- [ Thomas Bechtold ]
- * debian/control: Add haproxy-dbg binary package for debug symbols.
-
- [ Apollon Oikonomopoulos ]
- * New upstream development version.
- * Require syslog to be operational before starting. Closes: #726323.
-
- -- Vincent Bernat <bernat@debian.org> Tue, 17 Dec 2013 01:38:04 +0700
-
-haproxy (1.5~dev19-2) experimental; urgency=low
-
- [ Vincent Bernat ]
- * Really enable systemd support by using dh-systemd helper.
- * Don't use -L/usr/lib and rely on default search path. Closes: #722777.
-
- [ Apollon Oikonomopoulos ]
- * Ship halog.
-
- -- Vincent Bernat <bernat@debian.org> Thu, 12 Sep 2013 21:58:05 +0200
-
-haproxy (1.5~dev19-1) experimental; urgency=high
-
- [ Vincent Bernat ]
- * New upstream version.
- + CVE-2013-2175: fix a possible crash when using negative header
- occurrences.
- + Drop 0002-Fix-typo-in-src-haproxy.patch: applied upstream.
- * Enable gzip compression feature.
-
- [ Prach Pongpanich ]
- * Drop bashism patch. It seems useless to maintain a patch to convert
- example scripts from /bin/bash to /bin/sh.
- * Fix reload/restart action of init script (LP: #1187469)
-
- -- Vincent Bernat <bernat@debian.org> Mon, 17 Jun 2013 22:03:58 +0200
-
-haproxy (1.5~dev18-1) experimental; urgency=low
-
- [ Apollon Oikonomopoulos ]
- * New upstream development version
-
- [ Vincent Bernat ]
- * Add support for systemd. Currently, /etc/default/haproxy is not used
- when using systemd.
-
- -- Vincent Bernat <bernat@debian.org> Sun, 26 May 2013 12:33:00 +0200
-
-haproxy (1.4.25-1) unstable; urgency=medium
-
- [ Prach Pongpanich ]
- * New upstream version.
- * Update watch file to use the source page.
- * Bump Standards-Version to 3.9.5.
-
- [ Thomas Bechtold ]
- * debian/control: Add haproxy-dbg binary package for debug symbols.
-
- [ Apollon Oikonomopoulos ]
- * Require syslog to be operational before starting. Closes: #726323.
- * Document how to bind non-local IPv6 addresses.
- * Add a reference to configuration.txt.gz to the manpage.
- * debian/copyright: synchronize with source.
-
- -- Prach Pongpanich <prachpub@gmail.com> Fri, 28 Mar 2014 09:35:09 +0700
-
-haproxy (1.4.24-2) unstable; urgency=low
-
- [ Apollon Oikonomopoulos ]
- * Ship contrib/halog as /usr/bin/halog.
-
- [ Vincent Bernat ]
- * Don't use -L/usr/lib and rely on default search path. Closes: #722777.
-
- -- Vincent Bernat <bernat@debian.org> Sun, 15 Sep 2013 14:36:27 +0200
-
-haproxy (1.4.24-1) unstable; urgency=high
-
- [ Vincent Bernat ]
- * New upstream version.
- + CVE-2013-2175: fix a possible crash when using negative header
- occurrences.
-
- [ Prach Pongpanich ]
- * Drop bashism patch. It seems useless to maintain a patch to convert
- example scripts from /bin/bash to /bin/sh.
- * Fix reload/restart action of init script (LP: #1187469).
-
- -- Vincent Bernat <bernat@debian.org> Mon, 17 Jun 2013 21:56:26 +0200
-
-haproxy (1.4.23-1) unstable; urgency=low
-
- [ Apollon Oikonomopoulos ]
- * New upstream version (Closes: #643650, #678953)
- + This fixes CVE-2012-2942 (Closes: #674447)
- + This fixes CVE-2013-1912 (Closes: #704611)
- * Ship vim addon as vim-haproxy (Closes: #702893)
- * Check for the configuration file after sourcing /etc/default/haproxy
- (Closes: #641762)
- * Use /dev/log for logging by default (Closes: #649085)
-
- [ Vincent Bernat ]
- * debian/control:
- + add Vcs-* fields
- + switch maintenance to Debian HAProxy team. (Closes: #706890)
- + drop dependency to quilt: 3.0 (quilt) format is in use.
- * debian/rules:
- + don't explicitly call dh_installchangelog.
- + use dh_installdirs to install directories.
- + use dh_install to install error and configuration files.
- + switch to `linux2628` Makefile target for Linux.
- * debian/postrm:
- + remove haproxy user and group on purge.
- * Ship a more minimal haproxy.cfg file: no `listen` blocks but `global`
- and `defaults` block with appropriate configuration to use chroot and
- logging in the expected way.
-
- [ Prach Pongpanich ]
- * debian/copyright:
- + add missing copyright holders
- + update years of copyright
- * debian/rules:
- + build with -Wl,--as-needed to get rid of unnecessary depends
- * Remove useless files in debian/haproxy.{docs,examples}
- * Update debian/watch file, thanks to Bart Martens
-
- -- Vincent Bernat <bernat@debian.org> Mon, 06 May 2013 20:02:14 +0200
-
-haproxy (1.4.15-1) unstable; urgency=low
-
- * New upstream release with critical bug fix (Closes: #631351)
-
- -- Christo Buschek <crito@30loops.net> Thu, 14 Jul 2011 18:17:05 +0200
-
-haproxy (1.4.13-1) unstable; urgency=low
-
- * New maintainer upload (Closes: #615246)
- * New upstream release
- * Standards-version goes 3.9.1 (no change)
- * Added patch bashism (Closes: #581109)
- * Added a README.source file.
-
- -- Christo Buschek <crito@30loops.net> Thu, 11 Mar 2011 12:41:59 +0000
-
-haproxy (1.4.8-1) unstable; urgency=low
-
- * New upstream release.
-
- -- Arnaud Cornet <acornet@debian.org> Fri, 18 Jun 2010 00:42:53 +0100
-
-haproxy (1.4.4-1) unstable; urgency=low
-
- * New upstream release
- * Add splice and tproxy support
- * Add regparm optimization on i386
- * Switch to dpkg-source 3.0 (quilt) format
-
- -- Arnaud Cornet <acornet@debian.org> Thu, 15 Apr 2010 20:00:34 +0100
-
-haproxy (1.4.2-1) unstable; urgency=low
-
- * New upstream release
- * Remove debian/patches/haproxy.1-hyphen.patch gone upstream
- * Tighten quilt build dep (Closes: #567087)
- * standards-version goes 3.8.4 (no change)
- * Add $remote_fs to init.d script required start and stop
-
- -- Arnaud Cornet <acornet@debian.org> Sat, 27 Mar 2010 15:19:48 +0000
-
-haproxy (1.3.22-1) unstable; urgency=low
-
- * New upstream bugfix release
-
- -- Arnaud Cornet <acornet@debian.org> Mon, 19 Oct 2009 22:31:45 +0100
-
-haproxy (1.3.21-1) unstable; urgency=low
-
- [ Michael Shuler ]
- * New Upstream Version (Closes: #538992)
- * Added override for example shell scripts in docs (Closes: #530096)
- * Added upstream changelog to docs
- * Added debian/watch
- * Updated debian/copyright format
- * Added haproxy.1-hyphen.patch, to fix hyphen in man page
- * Upgrade Standards-Version to 3.8.3 (no change needed)
- * Upgrade debian/compat to 7 (no change needed)
-
- [ Arnaud Cornet ]
- * New upstream version.
- * Merge Michael's work, few changelog fixes
- * Add debian/README.source to point to quilt doc
- * Depend on debhelper >= 7.0.50~ and use overrides in debian/rules
-
- -- Arnaud Cornet <acornet@debian.org> Sun, 18 Oct 2009 14:01:29 +0200
-
-haproxy (1.3.18-1) unstable; urgency=low
-
- * New Upstream Version (Closes: #534583).
- * Add contrib directory in docs
-
- -- Arnaud Cornet <acornet@debian.org> Fri, 26 Jun 2009 00:11:01 +0200
-
-haproxy (1.3.15.7-2) unstable; urgency=low
-
- * Fix build without debian/patches directory (Closes: #515682) using
- /usr/share/quilt/quilt.make.
-
- -- Arnaud Cornet <acornet@debian.org> Tue, 17 Feb 2009 08:55:12 +0100
-
-haproxy (1.3.15.7-1) unstable; urgency=low
-
- * New Upstream Version.
- * Remove upstream patches:
- -use_backend-consider-unless.patch
- -segfault-url_param+check_post.patch
- -server-timeout.patch
- -closed-fd-remove.patch
- -connection-slot-during-retry.patch
- -srv_dynamic_maxconn.patch
- -do-not-pause-backends-on-reload.patch
- -acl-in-default.patch
- -cookie-capture-check.patch
- -dead-servers-queue.patch
-
- -- Arnaud Cornet <acornet@debian.org> Mon, 16 Feb 2009 11:20:21 +0100
-
-haproxy (1.3.15.2-2~lenny1) testing-proposed-updates; urgency=low
-
- * Rebuild for lenny to circumvent pcre3 shlibs bump.
-
- -- Arnaud Cornet <acornet@debian.org> Wed, 14 Jan 2009 11:28:36 +0100
-
-haproxy (1.3.15.2-2) unstable; urgency=low
-
- * Add stable branch bug fixes from upstream (Closes: #510185).
- - use_backend-consider-unless.patch: consider "unless" in use_backend
- - segfault-url_param+check_post.patch: fix segfault with url_param +
- check_post
- - server-timeout.patch: consider server timeout in all circumstances
- - closed-fd-remove.patch: drop info about closed file descriptors
- - connection-slot-during-retry.patch: do not release the connection slot
- during a retry
- - srv_dynamic_maxconn.patch: dynamic connection throttling api fix
- - do-not-pause-backends-on-reload.patch: make reload reliable
- - acl-in-default.patch: allow acl-related keywords in defaults sections
- - cookie-capture-check.patch: cookie capture is declared in the frontend
- but checked on the backend
- - dead-servers-queue.patch: make dead servers not suck pending connections
- * Add quilt build-dependancy. Use quilt in debian/rules to apply
- patches.
-
- -- Arnaud Cornet <acornet@debian.org> Wed, 31 Dec 2008 08:50:21 +0100
-
-haproxy (1.3.15.2-1) unstable; urgency=low
-
- * New Upstream Version (Closes: #497186).
-
- -- Arnaud Cornet <acornet@debian.org> Sat, 30 Aug 2008 18:06:31 +0200
-
-haproxy (1.3.15.1-1) unstable; urgency=low
-
- * New Upstream Version
- * Upgrade standards version to 3.8.0 (no change needed).
- * Build with TARGET=linux26 on linux, TARGET=generic on other systems.
-
- -- Arnaud Cornet <acornet@debian.org> Fri, 20 Jun 2008 00:38:50 +0200
-
-haproxy (1.3.14.5-1) unstable; urgency=low
-
- * New Upstream Version (Closes: #484221)
- * Use debhelper 7, drop CDBS.
-
- -- Arnaud Cornet <acornet@debian.org> Wed, 04 Jun 2008 19:21:56 +0200
-
-haproxy (1.3.14.3-1) unstable; urgency=low
-
- * New Upstream Version
- * Add status argument support to init-script to conform to LSB.
- * Cleanup pidfile after stop in init script. Init script return code fixups.
-
- -- Arnaud Cornet <acornet@debian.org> Sun, 09 Mar 2008 21:30:29 +0100
-
-haproxy (1.3.14.2-3) unstable; urgency=low
-
- * Add init script support for nbproc > 1 in configuration. That is,
- multiple haproxy processes.
- * Use 'option redispatch' instead of redispatch in debian default
- config.
-
- -- Arnaud Cornet <acornet@debian.org> Sun, 03 Feb 2008 18:22:28 +0100
-
-haproxy (1.3.14.2-2) unstable; urgency=low
-
- * Fix init scripts's reload function to use -sf instead of -st (to wait for
- active session to finish cleanly). Also support dash. Thanks to
- Jean-Baptiste Quenot for noticing.
-
- -- Arnaud Cornet <acornet@debian.org> Thu, 24 Jan 2008 23:47:26 +0100
-
-haproxy (1.3.14.2-1) unstable; urgency=low
-
- * New Upstream Version
- * Simplify DEB_MAKE_INVOKE, as upstream now supports us overriding
- CFLAGS.
- * Move haproxy to usr/sbin.
-
- -- Arnaud Cornet <acornet@debian.org> Mon, 21 Jan 2008 22:42:51 +0100
-
-haproxy (1.3.14.1-1) unstable; urgency=low
-
- * New upstream release.
- * Drop dfsg list and hash code rewrite (merged upstream).
- * Add a HAPROXY variable in init script.
- * Drop makefile patch, fix debian/rules accordingly. Drop build-dependancy
- on quilt.
- * Manpage now upstream. Ship upstream's and drop ours.
-
- -- Arnaud Cornet <acornet@debian.org> Tue, 01 Jan 2008 22:50:09 +0100
-
-haproxy (1.3.12.dfsg2-1) unstable; urgency=low
-
- * New upstream bugfix release.
- * Use new Homepage tag.
- * Bump standards-version (no change needed).
- * Add build-depend on quilt and add patch to allow proper CFLAGS passing to
- make.
-
- -- Arnaud Cornet <acornet@debian.org> Tue, 25 Dec 2007 21:52:59 +0100
-
-haproxy (1.3.12.dfsg-1) unstable; urgency=low
-
- * Initial release (Closes: #416397).
- * The DFSG removes files with GPL-incompabitle license and adds a
- re-implementation by me.
-
- -- Arnaud Cornet <acornet@debian.org> Fri, 17 Aug 2007 09:33:41 +0200
+++ /dev/null
-doc/configuration.html
-doc/intro.html
+++ /dev/null
-Source: haproxy
-Section: net
-Priority: optional
-Maintainer: Debian HAProxy Maintainers <pkg-haproxy-maintainers@lists.alioth.debian.org>
-Uploaders: Apollon Oikonomopoulos <apoikos@debian.org>,
- Prach Pongpanich <prach@debian.org>,
- Vincent Bernat <bernat@debian.org>
-Standards-Version: 3.9.6
-Build-Depends: debhelper (>= 9),
- libpcre3-dev,
- libssl-dev,
- liblua5.3-dev,
- dh-systemd (>= 1.5),
- python-sphinx (>= 1.0.7+dfsg)
-Build-Depends-Indep: python, python-mako
-Homepage: http://haproxy.1wt.eu/
-Vcs-Git: git://anonscm.debian.org/pkg-haproxy/haproxy.git
-Vcs-Browser: http://anonscm.debian.org/gitweb/?p=pkg-haproxy/haproxy.git
-
-Package: haproxy
-Architecture: any
-Depends: ${shlibs:Depends}, ${misc:Depends}, adduser
-Suggests: vim-haproxy, haproxy-doc
-Description: fast and reliable load balancing reverse proxy
- HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high
- availability environments. It features connection persistence through HTTP
- cookies, load balancing, header addition, modification, deletion both ways. It
- has request blocking capabilities and provides interface to display server
- status.
-
-Package: haproxy-dbg
-Section: debug
-Priority: extra
-Architecture: any
-Depends: ${misc:Depends}, haproxy (= ${binary:Version})
-Description: fast and reliable load balancing reverse proxy (debug symbols)
- HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high
- availability environments. It features connection persistence through HTTP
- cookies, load balancing, header addition, modification, deletion both ways. It
- has request blocking capabilities and provides interface to display server
- status.
- .
- This package contains the debugging symbols for haproxy.
-
-Package: haproxy-doc
-Section: doc
-Priority: extra
-Architecture: all
-Depends: ${misc:Depends}, libjs-bootstrap (<< 4), libjs-jquery,
- ${sphinxdoc:Depends}
-Description: fast and reliable load balancing reverse proxy (HTML documentation)
- HAProxy is a TCP/HTTP reverse proxy which is particularly suited for high
- availability environments. It features connection persistence through HTTP
- cookies, load balancing, header addition, modification, deletion both ways. It
- has request blocking capabilities and provides interface to display server
- status.
- .
- This package contains the HTML documentation for haproxy.
-
-Package: vim-haproxy
-Architecture: all
-Depends: ${misc:Depends}
-Recommends: vim-addon-manager
-Description: syntax highlighting for HAProxy configuration files
- The vim-haproxy package provides filetype detection and syntax highlighting
- for HAProxy configuration files.
- .
- As per the Debian vim policy, installed addons are not activated
- automatically, but the "vim-addon-manager" tool can be used for this purpose.
+++ /dev/null
-Format: http://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
-Upstream-Name: haproxy
-Upstream-Contact: Willy Tarreau <w@1wt.eu>
-Source: http://haproxy.1wt.eu/
-
-Files: *
-Copyright: Copyright 2000-2015 Willy Tarreau <w@1wt.eu>.
-License: GPL-2+
-
-Files: ebtree/*
- include/*
- contrib/halog/fgets2.c
-Copyright: Copyright 2000-2013 Willy Tarreau - w@1wt.eu
-License: LGPL-2.1
-
-Files: include/proto/auth.h
- include/types/checks.h
- include/types/auth.h
- src/auth.c
-Copyright: Copyright 2008-2010 Krzysztof Piotr Oledzki <ole@ans.pl>
-License: GPL-2+
-
-Files: include/import/lru.h
- src/lru.c
-Copyright: Copyright (C) 2015 Willy Tarreau <w@1wt.eu>
-License: Expat
-
-Files: include/import/xxhash.h
- src/xxhash.c
-Copyright: Copyright (C) 2012-2014, Yann Collet.
-License: BSD-2-clause
-
-Files: include/proto/shctx.h
- src/shctx.c
-Copyright: Copyright (C) 2011-2012 EXCELIANCE
-License: GPL-2+
-
-Files: include/proto/compression.h
- include/types/compression.h
-Copyright: Copyright 2012 (C) Exceliance, David Du Colombier <dducolombier@exceliance.fr>
- William Lallemand <wlallemand@exceliance.fr>
-License: LGPL-2.1
-
-Files: include/proto/peers.h
- include/proto/ssl_sock.h
- include/types/peers.h
- include/types/ssl_sock.h
-Copyright: Copyright (C) 2009-2012 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
-License: LGPL-2.1
-
-Files: include/types/dns.h
-Copyright: Copyright (C) 2014 Baptiste Assmann <bedis9@gmail.com>
-License: LGPL-2.1
-
-Files: src/dns.c
-Copyright: Copyright (C) 2014 Baptiste Assmann <bedis9@gmail.com>
-License: GPL-2+
-
-Files: include/types/mailers.h
- src/mailers.c
-Copyright: Copyright 2015 Horms Solutions Ltd., Simon Horman <horms@verge.net.au>
- Copyright 2010 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
-License: LGPL-2.1
-
-Files: include/proto/sample.h
- include/proto/stick_table.h
- include/types/sample.h
- include/types/stick_table.h
-Copyright: Copyright (C) 2009-2012 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
- Copyright (C) 2010-2013 Willy Tarreau <w@1wt.eu>
-License: LGPL-2.1
-
-Files: include/types/counters.h
-Copyright: Copyright 2008-2009 Krzysztof Piotr Oledzki <ole@ans.pl>
- Copyright 2011 Willy Tarreau <w@1wt.eu>
-License: LGPL-2.1
-
-Files: include/common/base64.h
- include/common/uri_auth.h
- include/proto/signal.h
- include/types/signal.h
-Copyright: Copyright 2000-2013 Willy Tarreau <w@1wt.eu>
-License: GPL-2+
-
-Files: include/common/rbtree.h
-Copyright: (C) 1999 Andrea Arcangeli <andrea@suse.de>
-License: GPL-2+
-
-Files: src/base64.c
- src/checks.c
- src/dumpstats.c
- src/server.c
-Copyright: Copyright 2000-2012 Willy Tarreau <w@1wt.eu>
- Copyright 2007-2010 Krzysztof Piotr Oledzki <ole@ans.pl>
-License: GPL-2+
-
-Files: src/compression.c
-Copyright: Copyright 2012 (C) Exceliance, David Du Colombier <dducolombier@exceliance.fr>
- William Lallemand <wlallemand@exceliance.fr>
-License: GPL-2+
-
-Files: src/haproxy-systemd-wrapper.c
-Copyright: Copyright 2013 Marc-Antoine Perennou <Marc-Antoine@Perennou.com>
-License: GPL-2+
-
-Files: src/rbtree.c
-Copyright: (C) 1999 Andrea Arcangeli <andrea@suse.de>
- (C) 2002 David Woodhouse <dwmw2@infradead.org>
-License: GPL-2+
-
-Files: src/sample.c
- src/stick_table.c
-Copyright: Copyright 2009-2010 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
- Copyright (C) 2010-2012 Willy Tarreau <w@1wt.eu>
-License: GPL-2+
-
-Files: src/peers.c
- src/ssl_sock.c
-Copyright: Copyright (C) 2010-2012 EXCELIANCE, Emeric Brun <ebrun@exceliance.fr>
-License: GPL-2+
-
-Files: contrib/netsnmp-perl/haproxy.pl
- contrib/base64/base64rev-gen.c
-Copyright: Copyright 2007-2010 Krzysztof Piotr Oledzki <ole@ans.pl>
-License: GPL-2+
-
-Files: examples/stats_haproxy.sh
-Copyright: Copyright 2007 Julien Antony and Matthieu Huguet
-License: GPL-2+
-
-Files: examples/check
-Copyright: 2006-2007 (C) Fabrice Dulaunoy <fabrice@dulaunoy.com>
-License: GPL-2+
-
-Files: tests/test_pools.c
-Copyright: Copyright 2007 Aleksandar Lazic <al-haproxy@none.at>
-License: GPL-2+
-
-Files: debian/*
-Copyright: Copyright (C) 2007-2011, Arnaud Cornet <acornet@debian.org>
- Copyright (C) 2011, Christo Buschek <crito@30loops.net>
- Copyright (C) 2013, Prach Pongpanich <prachpub@gmail.com>
- Copyright (C) 2013-2014, Apollon Oikonomopoulos <apoikos@debian.org>
- Copyright (C) 2013, Vincent Bernat <bernat@debian.org>
-License: GPL-2
-
-Files: debian/dconv/*
-Copyright: Copyright (C) 2012 Cyril Bonté
-License: Apache-2.0
-
-Files: debian/dconv/js/typeahead.bundle.js
-Copyright: Copyright 2013-2015 Twitter, Inc. and other contributors
-License: Expat
-
-License: GPL-2+
- This program is free software; you can redistribute it
- and/or modify it under the terms of the GNU General Public
- License as published by the Free Software Foundation; either
- version 2 of the License, or (at your option) any later
- version.
- .
- This program is distributed in the hope that it will be
- useful, but WITHOUT ANY WARRANTY; without even the implied
- warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
- PURPOSE. See the GNU General Public License for more
- details.
- .
- You should have received a copy of the GNU General Public
- License along with this package; if not, write to the Free
- Software Foundation, Inc., 51 Franklin St, Fifth Floor,
- Boston, MA 02110-1301 USA
- .
- On Debian systems, the full text of the GNU General Public
- License version 2 can be found in the file
- `/usr/share/common-licenses/GPL-2'.
-
-License: LGPL-2.1
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
- .
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
- .
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
- .
- On Debian systems, the complete text of the GNU Lesser General Public License,
- version 2.1, can be found in /usr/share/common-licenses/LGPL-2.1.
-
-License: GPL-2
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License version 2 as
- published by the Free Software Foundation.
- .
- On Debian systems, the complete text of the GNU General Public License, version
- 2, can be found in /usr/share/common-licenses/GPL-2.
-
-License: Apache-2.0
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
- .
- http://www.apache.org/licenses/LICENSE-2.0
- .
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
- .
- On Debian systems, the full text of the Apache License version 2.0 can be
- found in the file `/usr/share/common-licenses/Apache-2.0'.
-
-License: Expat
- Permission is hereby granted, free of charge, to any person obtaining
- a copy of this software and associated documentation files (the
- "Software"), to deal in the Software without restriction, including
- without limitation the rights to use, copy, modify, merge, publish,
- distribute, sublicense, and/or sell copies of the Software, and to
- permit persons to whom the Software is furnished to do so, subject to
- the following conditions:
- .
- The above copyright notice and this permission notice shall be
- included in all copies or substantial portions of the Software.
- .
- THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
- EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
- NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
- LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
- OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
- WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-
-License: BSD-2-clause
- Redistribution and use in source and binary forms, with or without
- modification, are permitted provided that the following conditions are
- met:
- .
- * Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
- * Redistributions in binary form must reproduce the above
- copyright notice, this list of conditions and the following disclaimer
- in the documentation and/or other materials provided with the
- distribution.
- .
- THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
- "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
- LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
- A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
- OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
- SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
- LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
- DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
- THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
- (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
- OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+++ /dev/null
-
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright [yyyy] [name of copyright owner]
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
+++ /dev/null
-Copyright 2012 Cyril Bonté
-
-Licensed under the Apache License, Version 2.0 (the "License");
-you may not use this file except in compliance with the License.
-You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
+++ /dev/null
-# HAProxy Documentation Converter
-
-Made to convert the HAProxy documentation into HTML.
-
-More than HTML, the main goal is to provide easy navigation.
-
-## Documentations
-
-A bot periodically fetches last commits for HAProxy 1.4 and 1.5 to produce up-to-date documentations.
-
-Converted documentations are then stored online :
-- HAProxy 1.4 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.4.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.4.html)
-- HAProxy 1.5 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.5.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.5.html)
-- HAProxy 1.6 Configuration Manual : [stable](http://cbonte.github.com/haproxy-dconv/configuration-1.6.html) / [snapshot](http://cbonte.github.com/haproxy-dconv/snapshot/configuration-1.6.html)
-
-
-## Contribute
-
-The project now lives by itself, as it is sufficiently useable. But I'm sure we can do even better.
-Feel free to report feature requests or to provide patches !
-
+++ /dev/null
-/* Global Styles */
-
-body {
- margin-top: 50px;
- background: #eee;
-}
-
-a.anchor {
- display: block; position: relative; top: -50px; visibility: hidden;
-}
-
-/* ------------------------------- */
-
-/* Wrappers */
-
-/* ------------------------------- */
-
-#wrapper {
- width: 100%;
-}
-
-#page-wrapper {
- padding: 0 15px 50px;
- width: 740px;
- background-color: #fff;
- margin-left: 250px;
-}
-
-#sidebar {
- position: fixed;
- width: 250px;
- top: 50px;
- bottom: 0;
- padding: 15px;
- background: #f5f5f5;
- border-right: 1px solid #ccc;
-}
-
-
-/* ------------------------------- */
-
-/* Twitter typeahead.js */
-
-/* ------------------------------- */
-
-.twitter-typeahead {
- width: 100%;
-}
-.typeahead,
-.tt-query,
-.tt-hint {
- width: 100%;
- padding: 8px 12px;
- border: 2px solid #ccc;
- -webkit-border-radius: 8px;
- -moz-border-radius: 8px;
- border-radius: 8px;
- outline: none;
-}
-
-.typeahead {
- background-color: #fff;
-}
-
-.typeahead:focus {
- border: 2px solid #0097cf;
-}
-
-.tt-query {
- -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
- -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
- box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
-}
-
-.tt-hint {
- color: #999
-}
-
-.tt-menu {
- width: 100%;
- margin-top: 4px;
- padding: 8px 0;
- background-color: #fff;
- border: 1px solid #ccc;
- border: 1px solid rgba(0, 0, 0, 0.2);
- -webkit-border-radius: 8px;
- -moz-border-radius: 8px;
- border-radius: 8px;
- -webkit-box-shadow: 0 5px 10px rgba(0,0,0,.2);
- -moz-box-shadow: 0 5px 10px rgba(0,0,0,.2);
- box-shadow: 0 5px 10px rgba(0,0,0,.2);
-}
-
-.tt-suggestion {
- padding: 3px 8px;
- line-height: 24px;
-}
-
-.tt-suggestion:hover {
- cursor: pointer;
- color: #fff;
- background-color: #0097cf;
-}
-
-.tt-suggestion.tt-cursor {
- color: #fff;
- background-color: #0097cf;
-
-}
-
-.tt-suggestion p {
- margin: 0;
-}
-
-#searchKeyword {
- width: 100%;
- margin: 0;
-}
-
-#searchKeyword .tt-menu {
- max-height: 300px;
- overflow-y: auto;
-}
-
-/* ------------------------------- */
-
-/* Misc */
-
-/* ------------------------------- */
-
-.well-small ul {
- padding: 0px;
-}
-.table th,
-.table td.pagination-centered {
- text-align: center;
-}
-
-pre {
- overflow: visible; /* Workaround for dropdown menus */
-}
-
-pre.text {
- padding: 0;
- font-size: 13px;
- color: #000;
- background: transparent;
- border: none;
- margin-bottom: 18px;
-}
-pre.arguments {
- font-size: 13px;
- color: #000;
- background: transparent;
-}
-
-.comment {
- color: #888;
-}
-small, .small {
- color: #888;
-}
-.level1 {
- font-size: 125%;
-}
-.sublevels {
- border-left: 1px solid #ccc;
- padding-left: 10px;
-}
-.tab {
- padding-left: 20px;
-}
-.keyword {
- font-family: Menlo, Monaco, "Courier New", monospace;
- white-space: pre;
- background: #eee;
- border-top: 1px solid #fff;
- border-bottom: 1px solid #ccc;
-}
-
-.label-see-also {
- background-color: #999;
-}
-.label-disabled {
- background-color: #ccc;
-}
-h5 {
- text-decoration: underline;
-}
-
-.example-desc {
- border-bottom: 1px solid #ccc;
- margin-bottom: 18px;
-}
-.noheight {
- min-height: 0 !important;
-}
-.separator {
- margin-bottom: 18px;
-}
-
-div {
- word-wrap: break-word;
-}
-
-html, body {
- width: 100%;
- min-height: 100%:
-}
-
-.dropdown-menu > li {
- white-space: nowrap;
-}
-/* TEMPORARILY HACKS WHILE PRE TAGS ARE USED
--------------------------------------------------- */
-
-h5,
-.unpre,
-.example-desc,
-.dropdown-menu {
- font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
- white-space: normal;
-}
+++ /dev/null
-/*!
- * typeahead.js 0.11.1
- * https://github.com/twitter/typeahead.js
- * Copyright 2013-2015 Twitter, Inc. and other contributors; Licensed MIT
- */
-
-(function(root, factory) {
- if (typeof define === "function" && define.amd) {
- define("bloodhound", [ "jquery" ], function(a0) {
- return root["Bloodhound"] = factory(a0);
- });
- } else if (typeof exports === "object") {
- module.exports = factory(require("jquery"));
- } else {
- root["Bloodhound"] = factory(jQuery);
- }
-})(this, function($) {
- var _ = function() {
- "use strict";
- return {
- isMsie: function() {
- return /(msie|trident)/i.test(navigator.userAgent) ? navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false;
- },
- isBlankString: function(str) {
- return !str || /^\s*$/.test(str);
- },
- escapeRegExChars: function(str) {
- return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&");
- },
- isString: function(obj) {
- return typeof obj === "string";
- },
- isNumber: function(obj) {
- return typeof obj === "number";
- },
- isArray: $.isArray,
- isFunction: $.isFunction,
- isObject: $.isPlainObject,
- isUndefined: function(obj) {
- return typeof obj === "undefined";
- },
- isElement: function(obj) {
- return !!(obj && obj.nodeType === 1);
- },
- isJQuery: function(obj) {
- return obj instanceof $;
- },
- toStr: function toStr(s) {
- return _.isUndefined(s) || s === null ? "" : s + "";
- },
- bind: $.proxy,
- each: function(collection, cb) {
- $.each(collection, reverseArgs);
- function reverseArgs(index, value) {
- return cb(value, index);
- }
- },
- map: $.map,
- filter: $.grep,
- every: function(obj, test) {
- var result = true;
- if (!obj) {
- return result;
- }
- $.each(obj, function(key, val) {
- if (!(result = test.call(null, val, key, obj))) {
- return false;
- }
- });
- return !!result;
- },
- some: function(obj, test) {
- var result = false;
- if (!obj) {
- return result;
- }
- $.each(obj, function(key, val) {
- if (result = test.call(null, val, key, obj)) {
- return false;
- }
- });
- return !!result;
- },
- mixin: $.extend,
- identity: function(x) {
- return x;
- },
- clone: function(obj) {
- return $.extend(true, {}, obj);
- },
- getIdGenerator: function() {
- var counter = 0;
- return function() {
- return counter++;
- };
- },
- templatify: function templatify(obj) {
- return $.isFunction(obj) ? obj : template;
- function template() {
- return String(obj);
- }
- },
- defer: function(fn) {
- setTimeout(fn, 0);
- },
- debounce: function(func, wait, immediate) {
- var timeout, result;
- return function() {
- var context = this, args = arguments, later, callNow;
- later = function() {
- timeout = null;
- if (!immediate) {
- result = func.apply(context, args);
- }
- };
- callNow = immediate && !timeout;
- clearTimeout(timeout);
- timeout = setTimeout(later, wait);
- if (callNow) {
- result = func.apply(context, args);
- }
- return result;
- };
- },
- throttle: function(func, wait) {
- var context, args, timeout, result, previous, later;
- previous = 0;
- later = function() {
- previous = new Date();
- timeout = null;
- result = func.apply(context, args);
- };
- return function() {
- var now = new Date(), remaining = wait - (now - previous);
- context = this;
- args = arguments;
- if (remaining <= 0) {
- clearTimeout(timeout);
- timeout = null;
- previous = now;
- result = func.apply(context, args);
- } else if (!timeout) {
- timeout = setTimeout(later, remaining);
- }
- return result;
- };
- },
- stringify: function(val) {
- return _.isString(val) ? val : JSON.stringify(val);
- },
- noop: function() {}
- };
- }();
- var VERSION = "0.11.1";
- var tokenizers = function() {
- "use strict";
- return {
- nonword: nonword,
- whitespace: whitespace,
- obj: {
- nonword: getObjTokenizer(nonword),
- whitespace: getObjTokenizer(whitespace)
- }
- };
- function whitespace(str) {
- str = _.toStr(str);
- return str ? str.split(/\s+/) : [];
- }
- function nonword(str) {
- str = _.toStr(str);
- return str ? str.split(/\W+/) : [];
- }
- function getObjTokenizer(tokenizer) {
- return function setKey(keys) {
- keys = _.isArray(keys) ? keys : [].slice.call(arguments, 0);
- return function tokenize(o) {
- var tokens = [];
- _.each(keys, function(k) {
- tokens = tokens.concat(tokenizer(_.toStr(o[k])));
- });
- return tokens;
- };
- };
- }
- }();
- var LruCache = function() {
- "use strict";
- function LruCache(maxSize) {
- this.maxSize = _.isNumber(maxSize) ? maxSize : 100;
- this.reset();
- if (this.maxSize <= 0) {
- this.set = this.get = $.noop;
- }
- }
- _.mixin(LruCache.prototype, {
- set: function set(key, val) {
- var tailItem = this.list.tail, node;
- if (this.size >= this.maxSize) {
- this.list.remove(tailItem);
- delete this.hash[tailItem.key];
- this.size--;
- }
- if (node = this.hash[key]) {
- node.val = val;
- this.list.moveToFront(node);
- } else {
- node = new Node(key, val);
- this.list.add(node);
- this.hash[key] = node;
- this.size++;
- }
- },
- get: function get(key) {
- var node = this.hash[key];
- if (node) {
- this.list.moveToFront(node);
- return node.val;
- }
- },
- reset: function reset() {
- this.size = 0;
- this.hash = {};
- this.list = new List();
- }
- });
- function List() {
- this.head = this.tail = null;
- }
- _.mixin(List.prototype, {
- add: function add(node) {
- if (this.head) {
- node.next = this.head;
- this.head.prev = node;
- }
- this.head = node;
- this.tail = this.tail || node;
- },
- remove: function remove(node) {
- node.prev ? node.prev.next = node.next : this.head = node.next;
- node.next ? node.next.prev = node.prev : this.tail = node.prev;
- },
- moveToFront: function(node) {
- this.remove(node);
- this.add(node);
- }
- });
- function Node(key, val) {
- this.key = key;
- this.val = val;
- this.prev = this.next = null;
- }
- return LruCache;
- }();
- var PersistentStorage = function() {
- "use strict";
- var LOCAL_STORAGE;
- try {
- LOCAL_STORAGE = window.localStorage;
- LOCAL_STORAGE.setItem("~~~", "!");
- LOCAL_STORAGE.removeItem("~~~");
- } catch (err) {
- LOCAL_STORAGE = null;
- }
- function PersistentStorage(namespace, override) {
- this.prefix = [ "__", namespace, "__" ].join("");
- this.ttlKey = "__ttl__";
- this.keyMatcher = new RegExp("^" + _.escapeRegExChars(this.prefix));
- this.ls = override || LOCAL_STORAGE;
- !this.ls && this._noop();
- }
- _.mixin(PersistentStorage.prototype, {
- _prefix: function(key) {
- return this.prefix + key;
- },
- _ttlKey: function(key) {
- return this._prefix(key) + this.ttlKey;
- },
- _noop: function() {
- this.get = this.set = this.remove = this.clear = this.isExpired = _.noop;
- },
- _safeSet: function(key, val) {
- try {
- this.ls.setItem(key, val);
- } catch (err) {
- if (err.name === "QuotaExceededError") {
- this.clear();
- this._noop();
- }
- }
- },
- get: function(key) {
- if (this.isExpired(key)) {
- this.remove(key);
- }
- return decode(this.ls.getItem(this._prefix(key)));
- },
- set: function(key, val, ttl) {
- if (_.isNumber(ttl)) {
- this._safeSet(this._ttlKey(key), encode(now() + ttl));
- } else {
- this.ls.removeItem(this._ttlKey(key));
- }
- return this._safeSet(this._prefix(key), encode(val));
- },
- remove: function(key) {
- this.ls.removeItem(this._ttlKey(key));
- this.ls.removeItem(this._prefix(key));
- return this;
- },
- clear: function() {
- var i, keys = gatherMatchingKeys(this.keyMatcher);
- for (i = keys.length; i--; ) {
- this.remove(keys[i]);
- }
- return this;
- },
- isExpired: function(key) {
- var ttl = decode(this.ls.getItem(this._ttlKey(key)));
- return _.isNumber(ttl) && now() > ttl ? true : false;
- }
- });
- return PersistentStorage;
- function now() {
- return new Date().getTime();
- }
- function encode(val) {
- return JSON.stringify(_.isUndefined(val) ? null : val);
- }
- function decode(val) {
- return $.parseJSON(val);
- }
- function gatherMatchingKeys(keyMatcher) {
- var i, key, keys = [], len = LOCAL_STORAGE.length;
- for (i = 0; i < len; i++) {
- if ((key = LOCAL_STORAGE.key(i)).match(keyMatcher)) {
- keys.push(key.replace(keyMatcher, ""));
- }
- }
- return keys;
- }
- }();
- var Transport = function() {
- "use strict";
- var pendingRequestsCount = 0, pendingRequests = {}, maxPendingRequests = 6, sharedCache = new LruCache(10);
- function Transport(o) {
- o = o || {};
- this.cancelled = false;
- this.lastReq = null;
- this._send = o.transport;
- this._get = o.limiter ? o.limiter(this._get) : this._get;
- this._cache = o.cache === false ? new LruCache(0) : sharedCache;
- }
- Transport.setMaxPendingRequests = function setMaxPendingRequests(num) {
- maxPendingRequests = num;
- };
- Transport.resetCache = function resetCache() {
- sharedCache.reset();
- };
- _.mixin(Transport.prototype, {
- _fingerprint: function fingerprint(o) {
- o = o || {};
- return o.url + o.type + $.param(o.data || {});
- },
- _get: function(o, cb) {
- var that = this, fingerprint, jqXhr;
- fingerprint = this._fingerprint(o);
- if (this.cancelled || fingerprint !== this.lastReq) {
- return;
- }
- if (jqXhr = pendingRequests[fingerprint]) {
- jqXhr.done(done).fail(fail);
- } else if (pendingRequestsCount < maxPendingRequests) {
- pendingRequestsCount++;
- pendingRequests[fingerprint] = this._send(o).done(done).fail(fail).always(always);
- } else {
- this.onDeckRequestArgs = [].slice.call(arguments, 0);
- }
- function done(resp) {
- cb(null, resp);
- that._cache.set(fingerprint, resp);
- }
- function fail() {
- cb(true);
- }
- function always() {
- pendingRequestsCount--;
- delete pendingRequests[fingerprint];
- if (that.onDeckRequestArgs) {
- that._get.apply(that, that.onDeckRequestArgs);
- that.onDeckRequestArgs = null;
- }
- }
- },
- get: function(o, cb) {
- var resp, fingerprint;
- cb = cb || $.noop;
- o = _.isString(o) ? {
- url: o
- } : o || {};
- fingerprint = this._fingerprint(o);
- this.cancelled = false;
- this.lastReq = fingerprint;
- if (resp = this._cache.get(fingerprint)) {
- cb(null, resp);
- } else {
- this._get(o, cb);
- }
- },
- cancel: function() {
- this.cancelled = true;
- }
- });
- return Transport;
- }();
- var SearchIndex = window.SearchIndex = function() {
- "use strict";
- var CHILDREN = "c", IDS = "i";
- function SearchIndex(o) {
- o = o || {};
- if (!o.datumTokenizer || !o.queryTokenizer) {
- $.error("datumTokenizer and queryTokenizer are both required");
- }
- this.identify = o.identify || _.stringify;
- this.datumTokenizer = o.datumTokenizer;
- this.queryTokenizer = o.queryTokenizer;
- this.reset();
- }
- _.mixin(SearchIndex.prototype, {
- bootstrap: function bootstrap(o) {
- this.datums = o.datums;
- this.trie = o.trie;
- },
- add: function(data) {
- var that = this;
- data = _.isArray(data) ? data : [ data ];
- _.each(data, function(datum) {
- var id, tokens;
- that.datums[id = that.identify(datum)] = datum;
- tokens = normalizeTokens(that.datumTokenizer(datum));
- _.each(tokens, function(token) {
- var node, chars, ch;
- node = that.trie;
- chars = token.split("");
- while (ch = chars.shift()) {
- node = node[CHILDREN][ch] || (node[CHILDREN][ch] = newNode());
- node[IDS].push(id);
- }
- });
- });
- },
- get: function get(ids) {
- var that = this;
- return _.map(ids, function(id) {
- return that.datums[id];
- });
- },
- search: function search(query) {
- var that = this, tokens, matches;
- tokens = normalizeTokens(this.queryTokenizer(query));
- _.each(tokens, function(token) {
- var node, chars, ch, ids;
- if (matches && matches.length === 0) {
- return false;
- }
- node = that.trie;
- chars = token.split("");
- while (node && (ch = chars.shift())) {
- node = node[CHILDREN][ch];
- }
- if (node && chars.length === 0) {
- ids = node[IDS].slice(0);
- matches = matches ? getIntersection(matches, ids) : ids;
- } else {
- matches = [];
- return false;
- }
- });
- return matches ? _.map(unique(matches), function(id) {
- return that.datums[id];
- }) : [];
- },
- all: function all() {
- var values = [];
- for (var key in this.datums) {
- values.push(this.datums[key]);
- }
- return values;
- },
- reset: function reset() {
- this.datums = {};
- this.trie = newNode();
- },
- serialize: function serialize() {
- return {
- datums: this.datums,
- trie: this.trie
- };
- }
- });
- return SearchIndex;
- function normalizeTokens(tokens) {
- tokens = _.filter(tokens, function(token) {
- return !!token;
- });
- tokens = _.map(tokens, function(token) {
- return token.toLowerCase();
- });
- return tokens;
- }
- function newNode() {
- var node = {};
- node[IDS] = [];
- node[CHILDREN] = {};
- return node;
- }
- function unique(array) {
- var seen = {}, uniques = [];
- for (var i = 0, len = array.length; i < len; i++) {
- if (!seen[array[i]]) {
- seen[array[i]] = true;
- uniques.push(array[i]);
- }
- }
- return uniques;
- }
- function getIntersection(arrayA, arrayB) {
- var ai = 0, bi = 0, intersection = [];
- arrayA = arrayA.sort();
- arrayB = arrayB.sort();
- var lenArrayA = arrayA.length, lenArrayB = arrayB.length;
- while (ai < lenArrayA && bi < lenArrayB) {
- if (arrayA[ai] < arrayB[bi]) {
- ai++;
- } else if (arrayA[ai] > arrayB[bi]) {
- bi++;
- } else {
- intersection.push(arrayA[ai]);
- ai++;
- bi++;
- }
- }
- return intersection;
- }
- }();
- var Prefetch = function() {
- "use strict";
- var keys;
- keys = {
- data: "data",
- protocol: "protocol",
- thumbprint: "thumbprint"
- };
- function Prefetch(o) {
- this.url = o.url;
- this.ttl = o.ttl;
- this.cache = o.cache;
- this.prepare = o.prepare;
- this.transform = o.transform;
- this.transport = o.transport;
- this.thumbprint = o.thumbprint;
- this.storage = new PersistentStorage(o.cacheKey);
- }
- _.mixin(Prefetch.prototype, {
- _settings: function settings() {
- return {
- url: this.url,
- type: "GET",
- dataType: "json"
- };
- },
- store: function store(data) {
- if (!this.cache) {
- return;
- }
- this.storage.set(keys.data, data, this.ttl);
- this.storage.set(keys.protocol, location.protocol, this.ttl);
- this.storage.set(keys.thumbprint, this.thumbprint, this.ttl);
- },
- fromCache: function fromCache() {
- var stored = {}, isExpired;
- if (!this.cache) {
- return null;
- }
- stored.data = this.storage.get(keys.data);
- stored.protocol = this.storage.get(keys.protocol);
- stored.thumbprint = this.storage.get(keys.thumbprint);
- isExpired = stored.thumbprint !== this.thumbprint || stored.protocol !== location.protocol;
- return stored.data && !isExpired ? stored.data : null;
- },
- fromNetwork: function(cb) {
- var that = this, settings;
- if (!cb) {
- return;
- }
- settings = this.prepare(this._settings());
- this.transport(settings).fail(onError).done(onResponse);
- function onError() {
- cb(true);
- }
- function onResponse(resp) {
- cb(null, that.transform(resp));
- }
- },
- clear: function clear() {
- this.storage.clear();
- return this;
- }
- });
- return Prefetch;
- }();
- var Remote = function() {
- "use strict";
- function Remote(o) {
- this.url = o.url;
- this.prepare = o.prepare;
- this.transform = o.transform;
- this.transport = new Transport({
- cache: o.cache,
- limiter: o.limiter,
- transport: o.transport
- });
- }
- _.mixin(Remote.prototype, {
- _settings: function settings() {
- return {
- url: this.url,
- type: "GET",
- dataType: "json"
- };
- },
- get: function get(query, cb) {
- var that = this, settings;
- if (!cb) {
- return;
- }
- query = query || "";
- settings = this.prepare(query, this._settings());
- return this.transport.get(settings, onResponse);
- function onResponse(err, resp) {
- err ? cb([]) : cb(that.transform(resp));
- }
- },
- cancelLastRequest: function cancelLastRequest() {
- this.transport.cancel();
- }
- });
- return Remote;
- }();
- var oParser = function() {
- "use strict";
- return function parse(o) {
- var defaults, sorter;
- defaults = {
- initialize: true,
- identify: _.stringify,
- datumTokenizer: null,
- queryTokenizer: null,
- sufficient: 5,
- sorter: null,
- local: [],
- prefetch: null,
- remote: null
- };
- o = _.mixin(defaults, o || {});
- !o.datumTokenizer && $.error("datumTokenizer is required");
- !o.queryTokenizer && $.error("queryTokenizer is required");
- sorter = o.sorter;
- o.sorter = sorter ? function(x) {
- return x.sort(sorter);
- } : _.identity;
- o.local = _.isFunction(o.local) ? o.local() : o.local;
- o.prefetch = parsePrefetch(o.prefetch);
- o.remote = parseRemote(o.remote);
- return o;
- };
- function parsePrefetch(o) {
- var defaults;
- if (!o) {
- return null;
- }
- defaults = {
- url: null,
- ttl: 24 * 60 * 60 * 1e3,
- cache: true,
- cacheKey: null,
- thumbprint: "",
- prepare: _.identity,
- transform: _.identity,
- transport: null
- };
- o = _.isString(o) ? {
- url: o
- } : o;
- o = _.mixin(defaults, o);
- !o.url && $.error("prefetch requires url to be set");
- o.transform = o.filter || o.transform;
- o.cacheKey = o.cacheKey || o.url;
- o.thumbprint = VERSION + o.thumbprint;
- o.transport = o.transport ? callbackToDeferred(o.transport) : $.ajax;
- return o;
- }
- function parseRemote(o) {
- var defaults;
- if (!o) {
- return;
- }
- defaults = {
- url: null,
- cache: true,
- prepare: null,
- replace: null,
- wildcard: null,
- limiter: null,
- rateLimitBy: "debounce",
- rateLimitWait: 300,
- transform: _.identity,
- transport: null
- };
- o = _.isString(o) ? {
- url: o
- } : o;
- o = _.mixin(defaults, o);
- !o.url && $.error("remote requires url to be set");
- o.transform = o.filter || o.transform;
- o.prepare = toRemotePrepare(o);
- o.limiter = toLimiter(o);
- o.transport = o.transport ? callbackToDeferred(o.transport) : $.ajax;
- delete o.replace;
- delete o.wildcard;
- delete o.rateLimitBy;
- delete o.rateLimitWait;
- return o;
- }
- function toRemotePrepare(o) {
- var prepare, replace, wildcard;
- prepare = o.prepare;
- replace = o.replace;
- wildcard = o.wildcard;
- if (prepare) {
- return prepare;
- }
- if (replace) {
- prepare = prepareByReplace;
- } else if (o.wildcard) {
- prepare = prepareByWildcard;
- } else {
- prepare = idenityPrepare;
- }
- return prepare;
- function prepareByReplace(query, settings) {
- settings.url = replace(settings.url, query);
- return settings;
- }
- function prepareByWildcard(query, settings) {
- settings.url = settings.url.replace(wildcard, encodeURIComponent(query));
- return settings;
- }
- function idenityPrepare(query, settings) {
- return settings;
- }
- }
- function toLimiter(o) {
- var limiter, method, wait;
- limiter = o.limiter;
- method = o.rateLimitBy;
- wait = o.rateLimitWait;
- if (!limiter) {
- limiter = /^throttle$/i.test(method) ? throttle(wait) : debounce(wait);
- }
- return limiter;
- function debounce(wait) {
- return function debounce(fn) {
- return _.debounce(fn, wait);
- };
- }
- function throttle(wait) {
- return function throttle(fn) {
- return _.throttle(fn, wait);
- };
- }
- }
- function callbackToDeferred(fn) {
- return function wrapper(o) {
- var deferred = $.Deferred();
- fn(o, onSuccess, onError);
- return deferred;
- function onSuccess(resp) {
- _.defer(function() {
- deferred.resolve(resp);
- });
- }
- function onError(err) {
- _.defer(function() {
- deferred.reject(err);
- });
- }
- };
- }
- }();
- var Bloodhound = function() {
- "use strict";
- var old;
- old = window && window.Bloodhound;
- function Bloodhound(o) {
- o = oParser(o);
- this.sorter = o.sorter;
- this.identify = o.identify;
- this.sufficient = o.sufficient;
- this.local = o.local;
- this.remote = o.remote ? new Remote(o.remote) : null;
- this.prefetch = o.prefetch ? new Prefetch(o.prefetch) : null;
- this.index = new SearchIndex({
- identify: this.identify,
- datumTokenizer: o.datumTokenizer,
- queryTokenizer: o.queryTokenizer
- });
- o.initialize !== false && this.initialize();
- }
- Bloodhound.noConflict = function noConflict() {
- window && (window.Bloodhound = old);
- return Bloodhound;
- };
- Bloodhound.tokenizers = tokenizers;
- _.mixin(Bloodhound.prototype, {
- __ttAdapter: function ttAdapter() {
- var that = this;
- return this.remote ? withAsync : withoutAsync;
- function withAsync(query, sync, async) {
- return that.search(query, sync, async);
- }
- function withoutAsync(query, sync) {
- return that.search(query, sync);
- }
- },
- _loadPrefetch: function loadPrefetch() {
- var that = this, deferred, serialized;
- deferred = $.Deferred();
- if (!this.prefetch) {
- deferred.resolve();
- } else if (serialized = this.prefetch.fromCache()) {
- this.index.bootstrap(serialized);
- deferred.resolve();
- } else {
- this.prefetch.fromNetwork(done);
- }
- return deferred.promise();
- function done(err, data) {
- if (err) {
- return deferred.reject();
- }
- that.add(data);
- that.prefetch.store(that.index.serialize());
- deferred.resolve();
- }
- },
- _initialize: function initialize() {
- var that = this, deferred;
- this.clear();
- (this.initPromise = this._loadPrefetch()).done(addLocalToIndex);
- return this.initPromise;
- function addLocalToIndex() {
- that.add(that.local);
- }
- },
- initialize: function initialize(force) {
- return !this.initPromise || force ? this._initialize() : this.initPromise;
- },
- add: function add(data) {
- this.index.add(data);
- return this;
- },
- get: function get(ids) {
- ids = _.isArray(ids) ? ids : [].slice.call(arguments);
- return this.index.get(ids);
- },
- search: function search(query, sync, async) {
- var that = this, local;
- local = this.sorter(this.index.search(query));
- sync(this.remote ? local.slice() : local);
- if (this.remote && local.length < this.sufficient) {
- this.remote.get(query, processRemote);
- } else if (this.remote) {
- this.remote.cancelLastRequest();
- }
- return this;
- function processRemote(remote) {
- var nonDuplicates = [];
- _.each(remote, function(r) {
- !_.some(local, function(l) {
- return that.identify(r) === that.identify(l);
- }) && nonDuplicates.push(r);
- });
- async && async(nonDuplicates);
- }
- },
- all: function all() {
- return this.index.all();
- },
- clear: function clear() {
- this.index.reset();
- return this;
- },
- clearPrefetchCache: function clearPrefetchCache() {
- this.prefetch && this.prefetch.clear();
- return this;
- },
- clearRemoteCache: function clearRemoteCache() {
- Transport.resetCache();
- return this;
- },
- ttAdapter: function ttAdapter() {
- return this.__ttAdapter();
- }
- });
- return Bloodhound;
- }();
- return Bloodhound;
-});
-
-(function(root, factory) {
- if (typeof define === "function" && define.amd) {
- define("typeahead.js", [ "jquery" ], function(a0) {
- return factory(a0);
- });
- } else if (typeof exports === "object") {
- module.exports = factory(require("jquery"));
- } else {
- factory(jQuery);
- }
-})(this, function($) {
- var _ = function() {
- "use strict";
- return {
- isMsie: function() {
- return /(msie|trident)/i.test(navigator.userAgent) ? navigator.userAgent.match(/(msie |rv:)(\d+(.\d+)?)/i)[2] : false;
- },
- isBlankString: function(str) {
- return !str || /^\s*$/.test(str);
- },
- escapeRegExChars: function(str) {
- return str.replace(/[\-\[\]\/\{\}\(\)\*\+\?\.\\\^\$\|]/g, "\\$&");
- },
- isString: function(obj) {
- return typeof obj === "string";
- },
- isNumber: function(obj) {
- return typeof obj === "number";
- },
- isArray: $.isArray,
- isFunction: $.isFunction,
- isObject: $.isPlainObject,
- isUndefined: function(obj) {
- return typeof obj === "undefined";
- },
- isElement: function(obj) {
- return !!(obj && obj.nodeType === 1);
- },
- isJQuery: function(obj) {
- return obj instanceof $;
- },
- toStr: function toStr(s) {
- return _.isUndefined(s) || s === null ? "" : s + "";
- },
- bind: $.proxy,
- each: function(collection, cb) {
- $.each(collection, reverseArgs);
- function reverseArgs(index, value) {
- return cb(value, index);
- }
- },
- map: $.map,
- filter: $.grep,
- every: function(obj, test) {
- var result = true;
- if (!obj) {
- return result;
- }
- $.each(obj, function(key, val) {
- if (!(result = test.call(null, val, key, obj))) {
- return false;
- }
- });
- return !!result;
- },
- some: function(obj, test) {
- var result = false;
- if (!obj) {
- return result;
- }
- $.each(obj, function(key, val) {
- if (result = test.call(null, val, key, obj)) {
- return false;
- }
- });
- return !!result;
- },
- mixin: $.extend,
- identity: function(x) {
- return x;
- },
- clone: function(obj) {
- return $.extend(true, {}, obj);
- },
- getIdGenerator: function() {
- var counter = 0;
- return function() {
- return counter++;
- };
- },
- templatify: function templatify(obj) {
- return $.isFunction(obj) ? obj : template;
- function template() {
- return String(obj);
- }
- },
- defer: function(fn) {
- setTimeout(fn, 0);
- },
- debounce: function(func, wait, immediate) {
- var timeout, result;
- return function() {
- var context = this, args = arguments, later, callNow;
- later = function() {
- timeout = null;
- if (!immediate) {
- result = func.apply(context, args);
- }
- };
- callNow = immediate && !timeout;
- clearTimeout(timeout);
- timeout = setTimeout(later, wait);
- if (callNow) {
- result = func.apply(context, args);
- }
- return result;
- };
- },
- throttle: function(func, wait) {
- var context, args, timeout, result, previous, later;
- previous = 0;
- later = function() {
- previous = new Date();
- timeout = null;
- result = func.apply(context, args);
- };
- return function() {
- var now = new Date(), remaining = wait - (now - previous);
- context = this;
- args = arguments;
- if (remaining <= 0) {
- clearTimeout(timeout);
- timeout = null;
- previous = now;
- result = func.apply(context, args);
- } else if (!timeout) {
- timeout = setTimeout(later, remaining);
- }
- return result;
- };
- },
- stringify: function(val) {
- return _.isString(val) ? val : JSON.stringify(val);
- },
- noop: function() {}
- };
- }();
- var WWW = function() {
- "use strict";
- var defaultClassNames = {
- wrapper: "twitter-typeahead",
- input: "tt-input",
- hint: "tt-hint",
- menu: "tt-menu",
- dataset: "tt-dataset",
- suggestion: "tt-suggestion",
- selectable: "tt-selectable",
- empty: "tt-empty",
- open: "tt-open",
- cursor: "tt-cursor",
- highlight: "tt-highlight"
- };
- return build;
- function build(o) {
- var www, classes;
- classes = _.mixin({}, defaultClassNames, o);
- www = {
- css: buildCss(),
- classes: classes,
- html: buildHtml(classes),
- selectors: buildSelectors(classes)
- };
- return {
- css: www.css,
- html: www.html,
- classes: www.classes,
- selectors: www.selectors,
- mixin: function(o) {
- _.mixin(o, www);
- }
- };
- }
- function buildHtml(c) {
- return {
- wrapper: '<span class="' + c.wrapper + '"></span>',
- menu: '<div class="' + c.menu + '"></div>'
- };
- }
- function buildSelectors(classes) {
- var selectors = {};
- _.each(classes, function(v, k) {
- selectors[k] = "." + v;
- });
- return selectors;
- }
- function buildCss() {
- var css = {
- wrapper: {
- position: "relative",
- display: "inline-block"
- },
- hint: {
- position: "absolute",
- top: "0",
- left: "0",
- borderColor: "transparent",
- boxShadow: "none",
- opacity: "1"
- },
- input: {
- position: "relative",
- verticalAlign: "top",
- backgroundColor: "transparent"
- },
- inputWithNoHint: {
- position: "relative",
- verticalAlign: "top"
- },
- menu: {
- position: "absolute",
- top: "100%",
- left: "0",
- zIndex: "100",
- display: "none"
- },
- ltr: {
- left: "0",
- right: "auto"
- },
- rtl: {
- left: "auto",
- right: " 0"
- }
- };
- if (_.isMsie()) {
- _.mixin(css.input, {
- backgroundImage: "url(data:image/gif;base64,R0lGODlhAQABAIAAAAAAAP///yH5BAEAAAAALAAAAAABAAEAAAIBRAA7)"
- });
- }
- return css;
- }
- }();
- var EventBus = function() {
- "use strict";
- var namespace, deprecationMap;
- namespace = "typeahead:";
- deprecationMap = {
- render: "rendered",
- cursorchange: "cursorchanged",
- select: "selected",
- autocomplete: "autocompleted"
- };
- function EventBus(o) {
- if (!o || !o.el) {
- $.error("EventBus initialized without el");
- }
- this.$el = $(o.el);
- }
- _.mixin(EventBus.prototype, {
- _trigger: function(type, args) {
- var $e;
- $e = $.Event(namespace + type);
- (args = args || []).unshift($e);
- this.$el.trigger.apply(this.$el, args);
- return $e;
- },
- before: function(type) {
- var args, $e;
- args = [].slice.call(arguments, 1);
- $e = this._trigger("before" + type, args);
- return $e.isDefaultPrevented();
- },
- trigger: function(type) {
- var deprecatedType;
- this._trigger(type, [].slice.call(arguments, 1));
- if (deprecatedType = deprecationMap[type]) {
- this._trigger(deprecatedType, [].slice.call(arguments, 1));
- }
- }
- });
- return EventBus;
- }();
- var EventEmitter = function() {
- "use strict";
- var splitter = /\s+/, nextTick = getNextTick();
- return {
- onSync: onSync,
- onAsync: onAsync,
- off: off,
- trigger: trigger
- };
- function on(method, types, cb, context) {
- var type;
- if (!cb) {
- return this;
- }
- types = types.split(splitter);
- cb = context ? bindContext(cb, context) : cb;
- this._callbacks = this._callbacks || {};
- while (type = types.shift()) {
- this._callbacks[type] = this._callbacks[type] || {
- sync: [],
- async: []
- };
- this._callbacks[type][method].push(cb);
- }
- return this;
- }
- function onAsync(types, cb, context) {
- return on.call(this, "async", types, cb, context);
- }
- function onSync(types, cb, context) {
- return on.call(this, "sync", types, cb, context);
- }
- function off(types) {
- var type;
- if (!this._callbacks) {
- return this;
- }
- types = types.split(splitter);
- while (type = types.shift()) {
- delete this._callbacks[type];
- }
- return this;
- }
- function trigger(types) {
- var type, callbacks, args, syncFlush, asyncFlush;
- if (!this._callbacks) {
- return this;
- }
- types = types.split(splitter);
- args = [].slice.call(arguments, 1);
- while ((type = types.shift()) && (callbacks = this._callbacks[type])) {
- syncFlush = getFlush(callbacks.sync, this, [ type ].concat(args));
- asyncFlush = getFlush(callbacks.async, this, [ type ].concat(args));
- syncFlush() && nextTick(asyncFlush);
- }
- return this;
- }
- function getFlush(callbacks, context, args) {
- return flush;
- function flush() {
- var cancelled;
- for (var i = 0, len = callbacks.length; !cancelled && i < len; i += 1) {
- cancelled = callbacks[i].apply(context, args) === false;
- }
- return !cancelled;
- }
- }
- function getNextTick() {
- var nextTickFn;
- if (window.setImmediate) {
- nextTickFn = function nextTickSetImmediate(fn) {
- setImmediate(function() {
- fn();
- });
- };
- } else {
- nextTickFn = function nextTickSetTimeout(fn) {
- setTimeout(function() {
- fn();
- }, 0);
- };
- }
- return nextTickFn;
- }
- function bindContext(fn, context) {
- return fn.bind ? fn.bind(context) : function() {
- fn.apply(context, [].slice.call(arguments, 0));
- };
- }
- }();
- var highlight = function(doc) {
- "use strict";
- var defaults = {
- node: null,
- pattern: null,
- tagName: "strong",
- className: null,
- wordsOnly: false,
- caseSensitive: false
- };
- return function hightlight(o) {
- var regex;
- o = _.mixin({}, defaults, o);
- if (!o.node || !o.pattern) {
- return;
- }
- o.pattern = _.isArray(o.pattern) ? o.pattern : [ o.pattern ];
- regex = getRegex(o.pattern, o.caseSensitive, o.wordsOnly);
- traverse(o.node, hightlightTextNode);
- function hightlightTextNode(textNode) {
- var match, patternNode, wrapperNode;
- if (match = regex.exec(textNode.data)) {
- wrapperNode = doc.createElement(o.tagName);
- o.className && (wrapperNode.className = o.className);
- patternNode = textNode.splitText(match.index);
- patternNode.splitText(match[0].length);
- wrapperNode.appendChild(patternNode.cloneNode(true));
- textNode.parentNode.replaceChild(wrapperNode, patternNode);
- }
- return !!match;
- }
- function traverse(el, hightlightTextNode) {
- var childNode, TEXT_NODE_TYPE = 3;
- for (var i = 0; i < el.childNodes.length; i++) {
- childNode = el.childNodes[i];
- if (childNode.nodeType === TEXT_NODE_TYPE) {
- i += hightlightTextNode(childNode) ? 1 : 0;
- } else {
- traverse(childNode, hightlightTextNode);
- }
- }
- }
- };
- function getRegex(patterns, caseSensitive, wordsOnly) {
- var escapedPatterns = [], regexStr;
- for (var i = 0, len = patterns.length; i < len; i++) {
- escapedPatterns.push(_.escapeRegExChars(patterns[i]));
- }
- regexStr = wordsOnly ? "\\b(" + escapedPatterns.join("|") + ")\\b" : "(" + escapedPatterns.join("|") + ")";
- return caseSensitive ? new RegExp(regexStr) : new RegExp(regexStr, "i");
- }
- }(window.document);
- var Input = function() {
- "use strict";
- var specialKeyCodeMap;
- specialKeyCodeMap = {
- 9: "tab",
- 27: "esc",
- 37: "left",
- 39: "right",
- 13: "enter",
- 38: "up",
- 40: "down"
- };
- function Input(o, www) {
- o = o || {};
- if (!o.input) {
- $.error("input is missing");
- }
- www.mixin(this);
- this.$hint = $(o.hint);
- this.$input = $(o.input);
- this.query = this.$input.val();
- this.queryWhenFocused = this.hasFocus() ? this.query : null;
- this.$overflowHelper = buildOverflowHelper(this.$input);
- this._checkLanguageDirection();
- if (this.$hint.length === 0) {
- this.setHint = this.getHint = this.clearHint = this.clearHintIfInvalid = _.noop;
- }
- }
- Input.normalizeQuery = function(str) {
- return _.toStr(str).replace(/^\s*/g, "").replace(/\s{2,}/g, " ");
- };
- _.mixin(Input.prototype, EventEmitter, {
- _onBlur: function onBlur() {
- this.resetInputValue();
- this.trigger("blurred");
- },
- _onFocus: function onFocus() {
- this.queryWhenFocused = this.query;
- this.trigger("focused");
- },
- _onKeydown: function onKeydown($e) {
- var keyName = specialKeyCodeMap[$e.which || $e.keyCode];
- this._managePreventDefault(keyName, $e);
- if (keyName && this._shouldTrigger(keyName, $e)) {
- this.trigger(keyName + "Keyed", $e);
- }
- },
- _onInput: function onInput() {
- this._setQuery(this.getInputValue());
- this.clearHintIfInvalid();
- this._checkLanguageDirection();
- },
- _managePreventDefault: function managePreventDefault(keyName, $e) {
- var preventDefault;
- switch (keyName) {
- case "up":
- case "down":
- preventDefault = !withModifier($e);
- break;
-
- default:
- preventDefault = false;
- }
- preventDefault && $e.preventDefault();
- },
- _shouldTrigger: function shouldTrigger(keyName, $e) {
- var trigger;
- switch (keyName) {
- case "tab":
- trigger = !withModifier($e);
- break;
-
- default:
- trigger = true;
- }
- return trigger;
- },
- _checkLanguageDirection: function checkLanguageDirection() {
- var dir = (this.$input.css("direction") || "ltr").toLowerCase();
- if (this.dir !== dir) {
- this.dir = dir;
- this.$hint.attr("dir", dir);
- this.trigger("langDirChanged", dir);
- }
- },
- _setQuery: function setQuery(val, silent) {
- var areEquivalent, hasDifferentWhitespace;
- areEquivalent = areQueriesEquivalent(val, this.query);
- hasDifferentWhitespace = areEquivalent ? this.query.length !== val.length : false;
- this.query = val;
- if (!silent && !areEquivalent) {
- this.trigger("queryChanged", this.query);
- } else if (!silent && hasDifferentWhitespace) {
- this.trigger("whitespaceChanged", this.query);
- }
- },
- bind: function() {
- var that = this, onBlur, onFocus, onKeydown, onInput;
- onBlur = _.bind(this._onBlur, this);
- onFocus = _.bind(this._onFocus, this);
- onKeydown = _.bind(this._onKeydown, this);
- onInput = _.bind(this._onInput, this);
- this.$input.on("blur.tt", onBlur).on("focus.tt", onFocus).on("keydown.tt", onKeydown);
- if (!_.isMsie() || _.isMsie() > 9) {
- this.$input.on("input.tt", onInput);
- } else {
- this.$input.on("keydown.tt keypress.tt cut.tt paste.tt", function($e) {
- if (specialKeyCodeMap[$e.which || $e.keyCode]) {
- return;
- }
- _.defer(_.bind(that._onInput, that, $e));
- });
- }
- return this;
- },
- focus: function focus() {
- this.$input.focus();
- },
- blur: function blur() {
- this.$input.blur();
- },
- getLangDir: function getLangDir() {
- return this.dir;
- },
- getQuery: function getQuery() {
- return this.query || "";
- },
- setQuery: function setQuery(val, silent) {
- this.setInputValue(val);
- this._setQuery(val, silent);
- },
- hasQueryChangedSinceLastFocus: function hasQueryChangedSinceLastFocus() {
- return this.query !== this.queryWhenFocused;
- },
- getInputValue: function getInputValue() {
- return this.$input.val();
- },
- setInputValue: function setInputValue(value) {
- this.$input.val(value);
- this.clearHintIfInvalid();
- this._checkLanguageDirection();
- },
- resetInputValue: function resetInputValue() {
- this.setInputValue(this.query);
- },
- getHint: function getHint() {
- return this.$hint.val();
- },
- setHint: function setHint(value) {
- this.$hint.val(value);
- },
- clearHint: function clearHint() {
- this.setHint("");
- },
- clearHintIfInvalid: function clearHintIfInvalid() {
- var val, hint, valIsPrefixOfHint, isValid;
- val = this.getInputValue();
- hint = this.getHint();
- valIsPrefixOfHint = val !== hint && hint.indexOf(val) === 0;
- isValid = val !== "" && valIsPrefixOfHint && !this.hasOverflow();
- !isValid && this.clearHint();
- },
- hasFocus: function hasFocus() {
- return this.$input.is(":focus");
- },
- hasOverflow: function hasOverflow() {
- var constraint = this.$input.width() - 2;
- this.$overflowHelper.text(this.getInputValue());
- return this.$overflowHelper.width() >= constraint;
- },
- isCursorAtEnd: function() {
- var valueLength, selectionStart, range;
- valueLength = this.$input.val().length;
- selectionStart = this.$input[0].selectionStart;
- if (_.isNumber(selectionStart)) {
- return selectionStart === valueLength;
- } else if (document.selection) {
- range = document.selection.createRange();
- range.moveStart("character", -valueLength);
- return valueLength === range.text.length;
- }
- return true;
- },
- destroy: function destroy() {
- this.$hint.off(".tt");
- this.$input.off(".tt");
- this.$overflowHelper.remove();
- this.$hint = this.$input = this.$overflowHelper = $("<div>");
- }
- });
- return Input;
- function buildOverflowHelper($input) {
- return $('<pre aria-hidden="true"></pre>').css({
- position: "absolute",
- visibility: "hidden",
- whiteSpace: "pre",
- fontFamily: $input.css("font-family"),
- fontSize: $input.css("font-size"),
- fontStyle: $input.css("font-style"),
- fontVariant: $input.css("font-variant"),
- fontWeight: $input.css("font-weight"),
- wordSpacing: $input.css("word-spacing"),
- letterSpacing: $input.css("letter-spacing"),
- textIndent: $input.css("text-indent"),
- textRendering: $input.css("text-rendering"),
- textTransform: $input.css("text-transform")
- }).insertAfter($input);
- }
- function areQueriesEquivalent(a, b) {
- return Input.normalizeQuery(a) === Input.normalizeQuery(b);
- }
- function withModifier($e) {
- return $e.altKey || $e.ctrlKey || $e.metaKey || $e.shiftKey;
- }
- }();
- var Dataset = function() {
- "use strict";
- var keys, nameGenerator;
- keys = {
- val: "tt-selectable-display",
- obj: "tt-selectable-object"
- };
- nameGenerator = _.getIdGenerator();
- function Dataset(o, www) {
- o = o || {};
- o.templates = o.templates || {};
- o.templates.notFound = o.templates.notFound || o.templates.empty;
- if (!o.source) {
- $.error("missing source");
- }
- if (!o.node) {
- $.error("missing node");
- }
- if (o.name && !isValidName(o.name)) {
- $.error("invalid dataset name: " + o.name);
- }
- www.mixin(this);
- this.highlight = !!o.highlight;
- this.name = o.name || nameGenerator();
- this.limit = o.limit || 5;
- this.displayFn = getDisplayFn(o.display || o.displayKey);
- this.templates = getTemplates(o.templates, this.displayFn);
- this.source = o.source.__ttAdapter ? o.source.__ttAdapter() : o.source;
- this.async = _.isUndefined(o.async) ? this.source.length > 2 : !!o.async;
- this._resetLastSuggestion();
- this.$el = $(o.node).addClass(this.classes.dataset).addClass(this.classes.dataset + "-" + this.name);
- }
- Dataset.extractData = function extractData(el) {
- var $el = $(el);
- if ($el.data(keys.obj)) {
- return {
- val: $el.data(keys.val) || "",
- obj: $el.data(keys.obj) || null
- };
- }
- return null;
- };
- _.mixin(Dataset.prototype, EventEmitter, {
- _overwrite: function overwrite(query, suggestions) {
- suggestions = suggestions || [];
- if (suggestions.length) {
- this._renderSuggestions(query, suggestions);
- } else if (this.async && this.templates.pending) {
- this._renderPending(query);
- } else if (!this.async && this.templates.notFound) {
- this._renderNotFound(query);
- } else {
- this._empty();
- }
- this.trigger("rendered", this.name, suggestions, false);
- },
- _append: function append(query, suggestions) {
- suggestions = suggestions || [];
- if (suggestions.length && this.$lastSuggestion.length) {
- this._appendSuggestions(query, suggestions);
- } else if (suggestions.length) {
- this._renderSuggestions(query, suggestions);
- } else if (!this.$lastSuggestion.length && this.templates.notFound) {
- this._renderNotFound(query);
- }
- this.trigger("rendered", this.name, suggestions, true);
- },
- _renderSuggestions: function renderSuggestions(query, suggestions) {
- var $fragment;
- $fragment = this._getSuggestionsFragment(query, suggestions);
- this.$lastSuggestion = $fragment.children().last();
- this.$el.html($fragment).prepend(this._getHeader(query, suggestions)).append(this._getFooter(query, suggestions));
- },
- _appendSuggestions: function appendSuggestions(query, suggestions) {
- var $fragment, $lastSuggestion;
- $fragment = this._getSuggestionsFragment(query, suggestions);
- $lastSuggestion = $fragment.children().last();
- this.$lastSuggestion.after($fragment);
- this.$lastSuggestion = $lastSuggestion;
- },
- _renderPending: function renderPending(query) {
- var template = this.templates.pending;
- this._resetLastSuggestion();
- template && this.$el.html(template({
- query: query,
- dataset: this.name
- }));
- },
- _renderNotFound: function renderNotFound(query) {
- var template = this.templates.notFound;
- this._resetLastSuggestion();
- template && this.$el.html(template({
- query: query,
- dataset: this.name
- }));
- },
- _empty: function empty() {
- this.$el.empty();
- this._resetLastSuggestion();
- },
- _getSuggestionsFragment: function getSuggestionsFragment(query, suggestions) {
- var that = this, fragment;
- fragment = document.createDocumentFragment();
- _.each(suggestions, function getSuggestionNode(suggestion) {
- var $el, context;
- context = that._injectQuery(query, suggestion);
- $el = $(that.templates.suggestion(context)).data(keys.obj, suggestion).data(keys.val, that.displayFn(suggestion)).addClass(that.classes.suggestion + " " + that.classes.selectable);
- fragment.appendChild($el[0]);
- });
- this.highlight && highlight({
- className: this.classes.highlight,
- node: fragment,
- pattern: query
- });
- return $(fragment);
- },
- _getFooter: function getFooter(query, suggestions) {
- return this.templates.footer ? this.templates.footer({
- query: query,
- suggestions: suggestions,
- dataset: this.name
- }) : null;
- },
- _getHeader: function getHeader(query, suggestions) {
- return this.templates.header ? this.templates.header({
- query: query,
- suggestions: suggestions,
- dataset: this.name
- }) : null;
- },
- _resetLastSuggestion: function resetLastSuggestion() {
- this.$lastSuggestion = $();
- },
- _injectQuery: function injectQuery(query, obj) {
- return _.isObject(obj) ? _.mixin({
- _query: query
- }, obj) : obj;
- },
- update: function update(query) {
- var that = this, canceled = false, syncCalled = false, rendered = 0;
- this.cancel();
- this.cancel = function cancel() {
- canceled = true;
- that.cancel = $.noop;
- that.async && that.trigger("asyncCanceled", query);
- };
- this.source(query, sync, async);
- !syncCalled && sync([]);
- function sync(suggestions) {
- if (syncCalled) {
- return;
- }
- syncCalled = true;
- suggestions = (suggestions || []).slice(0, that.limit);
- rendered = suggestions.length;
- that._overwrite(query, suggestions);
- if (rendered < that.limit && that.async) {
- that.trigger("asyncRequested", query);
- }
- }
- function async(suggestions) {
- suggestions = suggestions || [];
- if (!canceled && rendered < that.limit) {
- that.cancel = $.noop;
- rendered += suggestions.length;
- that._append(query, suggestions.slice(0, that.limit - rendered));
- that.async && that.trigger("asyncReceived", query);
- }
- }
- },
- cancel: $.noop,
- clear: function clear() {
- this._empty();
- this.cancel();
- this.trigger("cleared");
- },
- isEmpty: function isEmpty() {
- return this.$el.is(":empty");
- },
- destroy: function destroy() {
- this.$el = $("<div>");
- }
- });
- return Dataset;
- function getDisplayFn(display) {
- display = display || _.stringify;
- return _.isFunction(display) ? display : displayFn;
- function displayFn(obj) {
- return obj[display];
- }
- }
- function getTemplates(templates, displayFn) {
- return {
- notFound: templates.notFound && _.templatify(templates.notFound),
- pending: templates.pending && _.templatify(templates.pending),
- header: templates.header && _.templatify(templates.header),
- footer: templates.footer && _.templatify(templates.footer),
- suggestion: templates.suggestion || suggestionTemplate
- };
- function suggestionTemplate(context) {
- return $("<div>").text(displayFn(context));
- }
- }
- function isValidName(str) {
- return /^[_a-zA-Z0-9-]+$/.test(str);
- }
- }();
- var Menu = function() {
- "use strict";
- function Menu(o, www) {
- var that = this;
- o = o || {};
- if (!o.node) {
- $.error("node is required");
- }
- www.mixin(this);
- this.$node = $(o.node);
- this.query = null;
- this.datasets = _.map(o.datasets, initializeDataset);
- function initializeDataset(oDataset) {
- var node = that.$node.find(oDataset.node).first();
- oDataset.node = node.length ? node : $("<div>").appendTo(that.$node);
- return new Dataset(oDataset, www);
- }
- }
- _.mixin(Menu.prototype, EventEmitter, {
- _onSelectableClick: function onSelectableClick($e) {
- this.trigger("selectableClicked", $($e.currentTarget));
- },
- _onRendered: function onRendered(type, dataset, suggestions, async) {
- this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty());
- this.trigger("datasetRendered", dataset, suggestions, async);
- },
- _onCleared: function onCleared() {
- this.$node.toggleClass(this.classes.empty, this._allDatasetsEmpty());
- this.trigger("datasetCleared");
- },
- _propagate: function propagate() {
- this.trigger.apply(this, arguments);
- },
- _allDatasetsEmpty: function allDatasetsEmpty() {
- return _.every(this.datasets, isDatasetEmpty);
- function isDatasetEmpty(dataset) {
- return dataset.isEmpty();
- }
- },
- _getSelectables: function getSelectables() {
- return this.$node.find(this.selectors.selectable);
- },
- _removeCursor: function _removeCursor() {
- var $selectable = this.getActiveSelectable();
- $selectable && $selectable.removeClass(this.classes.cursor);
- },
- _ensureVisible: function ensureVisible($el) {
- var elTop, elBottom, nodeScrollTop, nodeHeight;
- elTop = $el.position().top;
- elBottom = elTop + $el.outerHeight(true);
- nodeScrollTop = this.$node.scrollTop();
- nodeHeight = this.$node.height() + parseInt(this.$node.css("paddingTop"), 10) + parseInt(this.$node.css("paddingBottom"), 10);
- if (elTop < 0) {
- this.$node.scrollTop(nodeScrollTop + elTop);
- } else if (nodeHeight < elBottom) {
- this.$node.scrollTop(nodeScrollTop + (elBottom - nodeHeight));
- }
- },
- bind: function() {
- var that = this, onSelectableClick;
- onSelectableClick = _.bind(this._onSelectableClick, this);
- this.$node.on("click.tt", this.selectors.selectable, onSelectableClick);
- _.each(this.datasets, function(dataset) {
- dataset.onSync("asyncRequested", that._propagate, that).onSync("asyncCanceled", that._propagate, that).onSync("asyncReceived", that._propagate, that).onSync("rendered", that._onRendered, that).onSync("cleared", that._onCleared, that);
- });
- return this;
- },
- isOpen: function isOpen() {
- return this.$node.hasClass(this.classes.open);
- },
- open: function open() {
- this.$node.addClass(this.classes.open);
- },
- close: function close() {
- this.$node.removeClass(this.classes.open);
- this._removeCursor();
- },
- setLanguageDirection: function setLanguageDirection(dir) {
- this.$node.attr("dir", dir);
- },
- selectableRelativeToCursor: function selectableRelativeToCursor(delta) {
- var $selectables, $oldCursor, oldIndex, newIndex;
- $oldCursor = this.getActiveSelectable();
- $selectables = this._getSelectables();
- oldIndex = $oldCursor ? $selectables.index($oldCursor) : -1;
- newIndex = oldIndex + delta;
- newIndex = (newIndex + 1) % ($selectables.length + 1) - 1;
- newIndex = newIndex < -1 ? $selectables.length - 1 : newIndex;
- return newIndex === -1 ? null : $selectables.eq(newIndex);
- },
- setCursor: function setCursor($selectable) {
- this._removeCursor();
- if ($selectable = $selectable && $selectable.first()) {
- $selectable.addClass(this.classes.cursor);
- this._ensureVisible($selectable);
- }
- },
- getSelectableData: function getSelectableData($el) {
- return $el && $el.length ? Dataset.extractData($el) : null;
- },
- getActiveSelectable: function getActiveSelectable() {
- var $selectable = this._getSelectables().filter(this.selectors.cursor).first();
- return $selectable.length ? $selectable : null;
- },
- getTopSelectable: function getTopSelectable() {
- var $selectable = this._getSelectables().first();
- return $selectable.length ? $selectable : null;
- },
- update: function update(query) {
- var isValidUpdate = query !== this.query;
- if (isValidUpdate) {
- this.query = query;
- _.each(this.datasets, updateDataset);
- }
- return isValidUpdate;
- function updateDataset(dataset) {
- dataset.update(query);
- }
- },
- empty: function empty() {
- _.each(this.datasets, clearDataset);
- this.query = null;
- this.$node.addClass(this.classes.empty);
- function clearDataset(dataset) {
- dataset.clear();
- }
- },
- destroy: function destroy() {
- this.$node.off(".tt");
- this.$node = $("<div>");
- _.each(this.datasets, destroyDataset);
- function destroyDataset(dataset) {
- dataset.destroy();
- }
- }
- });
- return Menu;
- }();
- var DefaultMenu = function() {
- "use strict";
- var s = Menu.prototype;
- function DefaultMenu() {
- Menu.apply(this, [].slice.call(arguments, 0));
- }
- _.mixin(DefaultMenu.prototype, Menu.prototype, {
- open: function open() {
- !this._allDatasetsEmpty() && this._show();
- return s.open.apply(this, [].slice.call(arguments, 0));
- },
- close: function close() {
- this._hide();
- return s.close.apply(this, [].slice.call(arguments, 0));
- },
- _onRendered: function onRendered() {
- if (this._allDatasetsEmpty()) {
- this._hide();
- } else {
- this.isOpen() && this._show();
- }
- return s._onRendered.apply(this, [].slice.call(arguments, 0));
- },
- _onCleared: function onCleared() {
- if (this._allDatasetsEmpty()) {
- this._hide();
- } else {
- this.isOpen() && this._show();
- }
- return s._onCleared.apply(this, [].slice.call(arguments, 0));
- },
- setLanguageDirection: function setLanguageDirection(dir) {
- this.$node.css(dir === "ltr" ? this.css.ltr : this.css.rtl);
- return s.setLanguageDirection.apply(this, [].slice.call(arguments, 0));
- },
- _hide: function hide() {
- this.$node.hide();
- },
- _show: function show() {
- this.$node.css("display", "block");
- }
- });
- return DefaultMenu;
- }();
- var Typeahead = function() {
- "use strict";
- function Typeahead(o, www) {
- var onFocused, onBlurred, onEnterKeyed, onTabKeyed, onEscKeyed, onUpKeyed, onDownKeyed, onLeftKeyed, onRightKeyed, onQueryChanged, onWhitespaceChanged;
- o = o || {};
- if (!o.input) {
- $.error("missing input");
- }
- if (!o.menu) {
- $.error("missing menu");
- }
- if (!o.eventBus) {
- $.error("missing event bus");
- }
- www.mixin(this);
- this.eventBus = o.eventBus;
- this.minLength = _.isNumber(o.minLength) ? o.minLength : 1;
- this.input = o.input;
- this.menu = o.menu;
- this.enabled = true;
- this.active = false;
- this.input.hasFocus() && this.activate();
- this.dir = this.input.getLangDir();
- this._hacks();
- this.menu.bind().onSync("selectableClicked", this._onSelectableClicked, this).onSync("asyncRequested", this._onAsyncRequested, this).onSync("asyncCanceled", this._onAsyncCanceled, this).onSync("asyncReceived", this._onAsyncReceived, this).onSync("datasetRendered", this._onDatasetRendered, this).onSync("datasetCleared", this._onDatasetCleared, this);
- onFocused = c(this, "activate", "open", "_onFocused");
- onBlurred = c(this, "deactivate", "_onBlurred");
- onEnterKeyed = c(this, "isActive", "isOpen", "_onEnterKeyed");
- onTabKeyed = c(this, "isActive", "isOpen", "_onTabKeyed");
- onEscKeyed = c(this, "isActive", "_onEscKeyed");
- onUpKeyed = c(this, "isActive", "open", "_onUpKeyed");
- onDownKeyed = c(this, "isActive", "open", "_onDownKeyed");
- onLeftKeyed = c(this, "isActive", "isOpen", "_onLeftKeyed");
- onRightKeyed = c(this, "isActive", "isOpen", "_onRightKeyed");
- onQueryChanged = c(this, "_openIfActive", "_onQueryChanged");
- onWhitespaceChanged = c(this, "_openIfActive", "_onWhitespaceChanged");
- this.input.bind().onSync("focused", onFocused, this).onSync("blurred", onBlurred, this).onSync("enterKeyed", onEnterKeyed, this).onSync("tabKeyed", onTabKeyed, this).onSync("escKeyed", onEscKeyed, this).onSync("upKeyed", onUpKeyed, this).onSync("downKeyed", onDownKeyed, this).onSync("leftKeyed", onLeftKeyed, this).onSync("rightKeyed", onRightKeyed, this).onSync("queryChanged", onQueryChanged, this).onSync("whitespaceChanged", onWhitespaceChanged, this).onSync("langDirChanged", this._onLangDirChanged, this);
- }
- _.mixin(Typeahead.prototype, {
- _hacks: function hacks() {
- var $input, $menu;
- $input = this.input.$input || $("<div>");
- $menu = this.menu.$node || $("<div>");
- $input.on("blur.tt", function($e) {
- var active, isActive, hasActive;
- active = document.activeElement;
- isActive = $menu.is(active);
- hasActive = $menu.has(active).length > 0;
- if (_.isMsie() && (isActive || hasActive)) {
- $e.preventDefault();
- $e.stopImmediatePropagation();
- _.defer(function() {
- $input.focus();
- });
- }
- });
- $menu.on("mousedown.tt", function($e) {
- $e.preventDefault();
- });
- },
- _onSelectableClicked: function onSelectableClicked(type, $el) {
- this.select($el);
- },
- _onDatasetCleared: function onDatasetCleared() {
- this._updateHint();
- },
- _onDatasetRendered: function onDatasetRendered(type, dataset, suggestions, async) {
- this._updateHint();
- this.eventBus.trigger("render", suggestions, async, dataset);
- },
- _onAsyncRequested: function onAsyncRequested(type, dataset, query) {
- this.eventBus.trigger("asyncrequest", query, dataset);
- },
- _onAsyncCanceled: function onAsyncCanceled(type, dataset, query) {
- this.eventBus.trigger("asynccancel", query, dataset);
- },
- _onAsyncReceived: function onAsyncReceived(type, dataset, query) {
- this.eventBus.trigger("asyncreceive", query, dataset);
- },
- _onFocused: function onFocused() {
- this._minLengthMet() && this.menu.update(this.input.getQuery());
- },
- _onBlurred: function onBlurred() {
- if (this.input.hasQueryChangedSinceLastFocus()) {
- this.eventBus.trigger("change", this.input.getQuery());
- }
- },
- _onEnterKeyed: function onEnterKeyed(type, $e) {
- var $selectable;
- if ($selectable = this.menu.getActiveSelectable()) {
- this.select($selectable) && $e.preventDefault();
- }
- },
- _onTabKeyed: function onTabKeyed(type, $e) {
- var $selectable;
- if ($selectable = this.menu.getActiveSelectable()) {
- this.select($selectable) && $e.preventDefault();
- } else if ($selectable = this.menu.getTopSelectable()) {
- this.autocomplete($selectable) && $e.preventDefault();
- }
- },
- _onEscKeyed: function onEscKeyed() {
- this.close();
- },
- _onUpKeyed: function onUpKeyed() {
- this.moveCursor(-1);
- },
- _onDownKeyed: function onDownKeyed() {
- this.moveCursor(+1);
- },
- _onLeftKeyed: function onLeftKeyed() {
- if (this.dir === "rtl" && this.input.isCursorAtEnd()) {
- this.autocomplete(this.menu.getTopSelectable());
- }
- },
- _onRightKeyed: function onRightKeyed() {
- if (this.dir === "ltr" && this.input.isCursorAtEnd()) {
- this.autocomplete(this.menu.getTopSelectable());
- }
- },
- _onQueryChanged: function onQueryChanged(e, query) {
- this._minLengthMet(query) ? this.menu.update(query) : this.menu.empty();
- },
- _onWhitespaceChanged: function onWhitespaceChanged() {
- this._updateHint();
- },
- _onLangDirChanged: function onLangDirChanged(e, dir) {
- if (this.dir !== dir) {
- this.dir = dir;
- this.menu.setLanguageDirection(dir);
- }
- },
- _openIfActive: function openIfActive() {
- this.isActive() && this.open();
- },
- _minLengthMet: function minLengthMet(query) {
- query = _.isString(query) ? query : this.input.getQuery() || "";
- return query.length >= this.minLength;
- },
- _updateHint: function updateHint() {
- var $selectable, data, val, query, escapedQuery, frontMatchRegEx, match;
- $selectable = this.menu.getTopSelectable();
- data = this.menu.getSelectableData($selectable);
- val = this.input.getInputValue();
- if (data && !_.isBlankString(val) && !this.input.hasOverflow()) {
- query = Input.normalizeQuery(val);
- escapedQuery = _.escapeRegExChars(query);
- frontMatchRegEx = new RegExp("^(?:" + escapedQuery + ")(.+$)", "i");
- match = frontMatchRegEx.exec(data.val);
- match && this.input.setHint(val + match[1]);
- } else {
- this.input.clearHint();
- }
- },
- isEnabled: function isEnabled() {
- return this.enabled;
- },
- enable: function enable() {
- this.enabled = true;
- },
- disable: function disable() {
- this.enabled = false;
- },
- isActive: function isActive() {
- return this.active;
- },
- activate: function activate() {
- if (this.isActive()) {
- return true;
- } else if (!this.isEnabled() || this.eventBus.before("active")) {
- return false;
- } else {
- this.active = true;
- this.eventBus.trigger("active");
- return true;
- }
- },
- deactivate: function deactivate() {
- if (!this.isActive()) {
- return true;
- } else if (this.eventBus.before("idle")) {
- return false;
- } else {
- this.active = false;
- this.close();
- this.eventBus.trigger("idle");
- return true;
- }
- },
- isOpen: function isOpen() {
- return this.menu.isOpen();
- },
- open: function open() {
- if (!this.isOpen() && !this.eventBus.before("open")) {
- this.menu.open();
- this._updateHint();
- this.eventBus.trigger("open");
- }
- return this.isOpen();
- },
- close: function close() {
- if (this.isOpen() && !this.eventBus.before("close")) {
- this.menu.close();
- this.input.clearHint();
- this.input.resetInputValue();
- this.eventBus.trigger("close");
- }
- return !this.isOpen();
- },
- setVal: function setVal(val) {
- this.input.setQuery(_.toStr(val));
- },
- getVal: function getVal() {
- return this.input.getQuery();
- },
- select: function select($selectable) {
- var data = this.menu.getSelectableData($selectable);
- if (data && !this.eventBus.before("select", data.obj)) {
- this.input.setQuery(data.val, true);
- this.eventBus.trigger("select", data.obj);
- this.close();
- return true;
- }
- return false;
- },
- autocomplete: function autocomplete($selectable) {
- var query, data, isValid;
- query = this.input.getQuery();
- data = this.menu.getSelectableData($selectable);
- isValid = data && query !== data.val;
- if (isValid && !this.eventBus.before("autocomplete", data.obj)) {
- this.input.setQuery(data.val);
- this.eventBus.trigger("autocomplete", data.obj);
- return true;
- }
- return false;
- },
- moveCursor: function moveCursor(delta) {
- var query, $candidate, data, payload, cancelMove;
- query = this.input.getQuery();
- $candidate = this.menu.selectableRelativeToCursor(delta);
- data = this.menu.getSelectableData($candidate);
- payload = data ? data.obj : null;
- cancelMove = this._minLengthMet() && this.menu.update(query);
- if (!cancelMove && !this.eventBus.before("cursorchange", payload)) {
- this.menu.setCursor($candidate);
- if (data) {
- this.input.setInputValue(data.val);
- } else {
- this.input.resetInputValue();
- this._updateHint();
- }
- this.eventBus.trigger("cursorchange", payload);
- return true;
- }
- return false;
- },
- destroy: function destroy() {
- this.input.destroy();
- this.menu.destroy();
- }
- });
- return Typeahead;
- function c(ctx) {
- var methods = [].slice.call(arguments, 1);
- return function() {
- var args = [].slice.call(arguments);
- _.each(methods, function(method) {
- return ctx[method].apply(ctx, args);
- });
- };
- }
- }();
- (function() {
- "use strict";
- var old, keys, methods;
- old = $.fn.typeahead;
- keys = {
- www: "tt-www",
- attrs: "tt-attrs",
- typeahead: "tt-typeahead"
- };
- methods = {
- initialize: function initialize(o, datasets) {
- var www;
- datasets = _.isArray(datasets) ? datasets : [].slice.call(arguments, 1);
- o = o || {};
- www = WWW(o.classNames);
- return this.each(attach);
- function attach() {
- var $input, $wrapper, $hint, $menu, defaultHint, defaultMenu, eventBus, input, menu, typeahead, MenuConstructor;
- _.each(datasets, function(d) {
- d.highlight = !!o.highlight;
- });
- $input = $(this);
- $wrapper = $(www.html.wrapper);
- $hint = $elOrNull(o.hint);
- $menu = $elOrNull(o.menu);
- defaultHint = o.hint !== false && !$hint;
- defaultMenu = o.menu !== false && !$menu;
- defaultHint && ($hint = buildHintFromInput($input, www));
- defaultMenu && ($menu = $(www.html.menu).css(www.css.menu));
- $hint && $hint.val("");
- $input = prepInput($input, www);
- if (defaultHint || defaultMenu) {
- $wrapper.css(www.css.wrapper);
- $input.css(defaultHint ? www.css.input : www.css.inputWithNoHint);
- $input.wrap($wrapper).parent().prepend(defaultHint ? $hint : null).append(defaultMenu ? $menu : null);
- }
- MenuConstructor = defaultMenu ? DefaultMenu : Menu;
- eventBus = new EventBus({
- el: $input
- });
- input = new Input({
- hint: $hint,
- input: $input
- }, www);
- menu = new MenuConstructor({
- node: $menu,
- datasets: datasets
- }, www);
- typeahead = new Typeahead({
- input: input,
- menu: menu,
- eventBus: eventBus,
- minLength: o.minLength
- }, www);
- $input.data(keys.www, www);
- $input.data(keys.typeahead, typeahead);
- }
- },
- isEnabled: function isEnabled() {
- var enabled;
- ttEach(this.first(), function(t) {
- enabled = t.isEnabled();
- });
- return enabled;
- },
- enable: function enable() {
- ttEach(this, function(t) {
- t.enable();
- });
- return this;
- },
- disable: function disable() {
- ttEach(this, function(t) {
- t.disable();
- });
- return this;
- },
- isActive: function isActive() {
- var active;
- ttEach(this.first(), function(t) {
- active = t.isActive();
- });
- return active;
- },
- activate: function activate() {
- ttEach(this, function(t) {
- t.activate();
- });
- return this;
- },
- deactivate: function deactivate() {
- ttEach(this, function(t) {
- t.deactivate();
- });
- return this;
- },
- isOpen: function isOpen() {
- var open;
- ttEach(this.first(), function(t) {
- open = t.isOpen();
- });
- return open;
- },
- open: function open() {
- ttEach(this, function(t) {
- t.open();
- });
- return this;
- },
- close: function close() {
- ttEach(this, function(t) {
- t.close();
- });
- return this;
- },
- select: function select(el) {
- var success = false, $el = $(el);
- ttEach(this.first(), function(t) {
- success = t.select($el);
- });
- return success;
- },
- autocomplete: function autocomplete(el) {
- var success = false, $el = $(el);
- ttEach(this.first(), function(t) {
- success = t.autocomplete($el);
- });
- return success;
- },
- moveCursor: function moveCursoe(delta) {
- var success = false;
- ttEach(this.first(), function(t) {
- success = t.moveCursor(delta);
- });
- return success;
- },
- val: function val(newVal) {
- var query;
- if (!arguments.length) {
- ttEach(this.first(), function(t) {
- query = t.getVal();
- });
- return query;
- } else {
- ttEach(this, function(t) {
- t.setVal(newVal);
- });
- return this;
- }
- },
- destroy: function destroy() {
- ttEach(this, function(typeahead, $input) {
- revert($input);
- typeahead.destroy();
- });
- return this;
- }
- };
- $.fn.typeahead = function(method) {
- if (methods[method]) {
- return methods[method].apply(this, [].slice.call(arguments, 1));
- } else {
- return methods.initialize.apply(this, arguments);
- }
- };
- $.fn.typeahead.noConflict = function noConflict() {
- $.fn.typeahead = old;
- return this;
- };
- function ttEach($els, fn) {
- $els.each(function() {
- var $input = $(this), typeahead;
- (typeahead = $input.data(keys.typeahead)) && fn(typeahead, $input);
- });
- }
- function buildHintFromInput($input, www) {
- return $input.clone().addClass(www.classes.hint).removeData().css(www.css.hint).css(getBackgroundStyles($input)).prop("readonly", true).removeAttr("id name placeholder required").attr({
- autocomplete: "off",
- spellcheck: "false",
- tabindex: -1
- });
- }
- function prepInput($input, www) {
- $input.data(keys.attrs, {
- dir: $input.attr("dir"),
- autocomplete: $input.attr("autocomplete"),
- spellcheck: $input.attr("spellcheck"),
- style: $input.attr("style")
- });
- $input.addClass(www.classes.input).attr({
- autocomplete: "off",
- spellcheck: false
- });
- try {
- !$input.attr("dir") && $input.attr("dir", "auto");
- } catch (e) {}
- return $input;
- }
- function getBackgroundStyles($el) {
- return {
- backgroundAttachment: $el.css("background-attachment"),
- backgroundClip: $el.css("background-clip"),
- backgroundColor: $el.css("background-color"),
- backgroundImage: $el.css("background-image"),
- backgroundOrigin: $el.css("background-origin"),
- backgroundPosition: $el.css("background-position"),
- backgroundRepeat: $el.css("background-repeat"),
- backgroundSize: $el.css("background-size")
- };
- }
- function revert($input) {
- var www, $wrapper;
- www = $input.data(keys.www);
- $wrapper = $input.parent().filter(www.selectors.wrapper);
- _.each($input.data(keys.attrs), function(val, key) {
- _.isUndefined(val) ? $input.removeAttr(key) : $input.attr(key, val);
- });
- $input.removeData(keys.typeahead).removeData(keys.www).removeData(keys.attr).removeClass(www.classes.input);
- if ($wrapper.length) {
- $input.detach().insertAfter($wrapper);
- $wrapper.remove();
- }
- }
- function $elOrNull(obj) {
- var isValid, $el;
- isValid = _.isJQuery(obj) || _.isElement(obj);
- $el = isValid ? $(obj).first() : [];
- return $el.length ? $el : null;
- }
- })();
-});
\ No newline at end of file
+++ /dev/null
-__all__ = [
- 'arguments',
- 'example',
- 'keyword',
- 'seealso',
- 'table',
- 'underline'
-]
-
-
-class Parser:
- def __init__(self, pctxt):
- self.pctxt = pctxt
-
- def parse(self, line):
- return line
-
-class PContext:
- def __init__(self, templates = None):
- self.set_content_list([])
- self.templates = templates
-
- def set_content(self, content):
- self.set_content_list(content.split("\n"))
-
- def set_content_list(self, content):
- self.lines = content
- self.nblines = len(self.lines)
- self.i = 0
- self.stop = False
-
- def get_lines(self):
- return self.lines
-
- def eat_lines(self):
- count = 0
- while self.has_more_lines() and self.lines[self.i].strip():
- count += 1
- self.next()
- return count
-
- def eat_empty_lines(self):
- count = 0
- while self.has_more_lines() and not self.lines[self.i].strip():
- count += 1
- self.next()
- return count
-
- def next(self, count=1):
- self.i += count
-
- def has_more_lines(self, offset=0):
- return self.i + offset < self.nblines
-
- def get_line(self, offset=0):
- return self.lines[self.i + offset].rstrip()
-
-
-# Get the indentation of a line
-def get_indent(line):
- indent = 0
- length = len(line)
- while indent < length and line[indent] == ' ':
- indent += 1
- return indent
-
-
-# Remove unneeded indentation
-def remove_indent(list):
- # Detect the minimum indentation in the list
- min_indent = -1
- for line in list:
- if not line.strip():
- continue
- indent = get_indent(line)
- if min_indent < 0 or indent < min_indent:
- min_indent = indent
- # Realign the list content to remove the minimum indentation
- if min_indent > 0:
- for index, line in enumerate(list):
- list[index] = line[min_indent:]
+++ /dev/null
-import sys
-import re
-import parser
-
-'''
-TODO: Allow inner data parsing (this will allow to parse the examples provided in an arguments block)
-'''
-class Parser(parser.Parser):
- def __init__(self, pctxt):
- parser.Parser.__init__(self, pctxt)
- #template = pctxt.templates.get_template("parser/arguments.tpl")
- #self.replace = template.render().strip()
-
- def parse(self, line):
- #return re.sub(r'(Arguments *:)', self.replace, line)
- pctxt = self.pctxt
-
- result = re.search(r'(Arguments? *:)', line)
- if result:
- label = result.group(0)
- content = []
-
- desc_indent = False
- desc = re.sub(r'.*Arguments? *:', '', line).strip()
-
- indent = parser.get_indent(line)
-
- pctxt.next()
- pctxt.eat_empty_lines()
-
- arglines = []
- if desc != "none":
- add_empty_lines = 0
- while pctxt.has_more_lines() and (parser.get_indent(pctxt.get_line()) > indent):
- for j in xrange(0, add_empty_lines):
- arglines.append("")
- arglines.append(pctxt.get_line())
- pctxt.next()
- add_empty_lines = pctxt.eat_empty_lines()
- '''
- print line
-
- if parser.get_indent(line) == arg_indent:
- argument = re.sub(r' *([^ ]+).*', r'\1', line)
- if argument:
- #content.append("<b>%s</b>" % argument)
- arg_desc = [line.replace(argument, " " * len(self.unescape(argument)), 1)]
- #arg_desc = re.sub(r'( *)([^ ]+)(.*)', r'\1<b>\2</b>\3', line)
- arg_desc_indent = parser.get_indent(arg_desc[0])
- arg_desc[0] = arg_desc[0][arg_indent:]
- pctxt.next()
- add_empty_lines = 0
- while pctxt.has_more_lines and parser.get_indent(pctxt.get_line()) >= arg_indent:
- for i in xrange(0, add_empty_lines):
- arg_desc.append("")
- arg_desc.append(pctxt.get_line()[arg_indent:])
- pctxt.next()
- add_empty_lines = pctxt.eat_empty_lines()
- # TODO : reduce space at the beginnning
- content.append({
- 'name': argument,
- 'desc': arg_desc
- })
- '''
-
- if arglines:
- new_arglines = []
- #content = self.parse_args(arglines)
- parser.remove_indent(arglines)
- '''
- pctxt2 = parser.PContext(pctxt.templates)
- pctxt2.set_content_list(arglines)
- while pctxt2.has_more_lines():
- new_arglines.append(parser.example.Parser(pctxt2).parse(pctxt2.get_line()))
- pctxt2.next()
- arglines = new_arglines
- '''
-
- pctxt.stop = True
-
- template = pctxt.templates.get_template("parser/arguments.tpl")
- return template.render(
- pctxt=pctxt,
- label=label,
- desc=desc,
- content=arglines
- #content=content
- )
- return line
-
- return line
-
-'''
- def parse_args(self, data):
- args = []
-
- pctxt = parser.PContext()
- pctxt.set_content_list(data)
-
- while pctxt.has_more_lines():
- line = pctxt.get_line()
- arg_indent = parser.get_indent(line)
- argument = re.sub(r' *([^ ]+).*', r'\1', line)
- if True or argument:
- arg_desc = []
- trailing_desc = line.replace(argument, " " * len(self.unescape(argument)), 1)[arg_indent:]
- if trailing_desc.strip():
- arg_desc.append(trailing_desc)
- pctxt.next()
- add_empty_lines = 0
- while pctxt.has_more_lines() and parser.get_indent(pctxt.get_line()) > arg_indent:
- for i in xrange(0, add_empty_lines):
- arg_desc.append("")
- arg_desc.append(pctxt.get_line()[arg_indent:])
- pctxt.next()
- add_empty_lines = pctxt.eat_empty_lines()
-
- parser.remove_indent(arg_desc)
-
- args.append({
- 'name': argument,
- 'desc': arg_desc
- })
- return args
-
- def unescape(self, s):
- s = s.replace("<", "<")
- s = s.replace(">", ">")
- # this has to be last:
- s = s.replace("&", "&")
- return s
-'''
+++ /dev/null
-import re
-import parser
-
-# Detect examples blocks
-class Parser(parser.Parser):
- def __init__(self, pctxt):
- parser.Parser.__init__(self, pctxt)
- template = pctxt.templates.get_template("parser/example/comment.tpl")
- self.comment = template.render(pctxt=pctxt).strip()
-
-
- def parse(self, line):
- pctxt = self.pctxt
-
- result = re.search(r'^ *(Examples? *:)(.*)', line)
- if result:
- label = result.group(1)
-
- desc_indent = False
- desc = result.group(2).strip()
-
- # Some examples have a description
- if desc:
- desc_indent = len(line) - len(desc)
-
- indent = parser.get_indent(line)
-
- if desc:
- # And some description are on multiple lines
- while pctxt.get_line(1) and parser.get_indent(pctxt.get_line(1)) == desc_indent:
- desc += " " + pctxt.get_line(1).strip()
- pctxt.next()
-
- pctxt.next()
- add_empty_line = pctxt.eat_empty_lines()
-
- content = []
-
- if parser.get_indent(pctxt.get_line()) > indent:
- if desc:
- desc = desc[0].upper() + desc[1:]
- add_empty_line = 0
- while pctxt.has_more_lines() and ((not pctxt.get_line()) or (parser.get_indent(pctxt.get_line()) > indent)):
- if pctxt.get_line():
- for j in xrange(0, add_empty_line):
- content.append("")
-
- content.append(re.sub(r'(#.*)$', self.comment, pctxt.get_line()))
- add_empty_line = 0
- else:
- add_empty_line += 1
- pctxt.next()
- elif parser.get_indent(pctxt.get_line()) == indent:
- # Simple example that can't have empty lines
- if add_empty_line and desc:
- # This means that the example was on the same line as the 'Example' tag
- # and was not a description
- content.append(" " * indent + desc)
- desc = False
- else:
- while pctxt.has_more_lines() and (parser.get_indent(pctxt.get_line()) >= indent):
- content.append(pctxt.get_line())
- pctxt.next()
- pctxt.eat_empty_lines() # Skip empty remaining lines
-
- pctxt.stop = True
-
- parser.remove_indent(content)
-
- template = pctxt.templates.get_template("parser/example.tpl")
- return template.render(
- pctxt=pctxt,
- label=label,
- desc=desc,
- content=content
- )
- return line
+++ /dev/null
-import re
-import parser
-from urllib import quote
-
-class Parser(parser.Parser):
- def __init__(self, pctxt):
- parser.Parser.__init__(self, pctxt)
- self.keywordPattern = re.compile(r'^(%s%s)(%s)' % (
- '([a-z][a-z0-9\-\+_\.]*[a-z0-9\-\+_)])', # keyword
- '( [a-z0-9\-_]+)*', # subkeywords
- '(\([^ ]*\))?', # arg (ex: (<backend>), (<frontend>/<backend>), (<offset1>,<length>[,<offset2>]) ...
- ))
-
- def parse(self, line):
- pctxt = self.pctxt
- keywords = pctxt.keywords
- keywordsCount = pctxt.keywordsCount
- chapters = pctxt.chapters
-
- res = ""
-
- if line != "" and not re.match(r'^ ', line):
- parsed = self.keywordPattern.match(line)
- if parsed != None:
- keyword = parsed.group(1)
- arg = parsed.group(4)
- parameters = line[len(keyword) + len(arg):]
- if (parameters != "" and not re.match("^ +((<|\[|\{|/).*|(: [a-z +]+))?(\(deprecated\))?$", parameters)):
- # Dirty hack
- # - parameters should only start with the characer "<", "[", "{", "/"
- # - or a column (":") followed by a alpha keywords to identify fetching samples (optionally separated by the character "+")
- # - or the string "(deprecated)" at the end
- keyword = False
- else:
- splitKeyword = keyword.split(" ")
-
- parameters = arg + parameters
- else:
- keyword = False
-
- if keyword and (len(splitKeyword) <= 5):
- toplevel = pctxt.details["toplevel"]
- for j in xrange(0, len(splitKeyword)):
- subKeyword = " ".join(splitKeyword[0:j + 1])
- if subKeyword != "no":
- if not subKeyword in keywords:
- keywords[subKeyword] = set()
- keywords[subKeyword].add(pctxt.details["chapter"])
- res += '<a class="anchor" name="%s"></a>' % subKeyword
- res += '<a class="anchor" name="%s-%s"></a>' % (toplevel, subKeyword)
- res += '<a class="anchor" name="%s-%s"></a>' % (pctxt.details["chapter"], subKeyword)
- res += '<a class="anchor" name="%s (%s)"></a>' % (subKeyword, chapters[toplevel]['title'])
- res += '<a class="anchor" name="%s (%s)"></a>' % (subKeyword, chapters[pctxt.details["chapter"]]['title'])
-
- deprecated = parameters.find("(deprecated)")
- if deprecated != -1:
- prefix = ""
- suffix = ""
- parameters = parameters.replace("(deprecated)", '<span class="label label-warning">(deprecated)</span>')
- else:
- prefix = ""
- suffix = ""
-
- nextline = pctxt.get_line(1)
-
- while nextline.startswith(" "):
- # Found parameters on the next line
- parameters += "\n" + nextline
- pctxt.next()
- if pctxt.has_more_lines(1):
- nextline = pctxt.get_line(1)
- else:
- nextline = ""
-
-
- parameters = self.colorize(parameters)
- res += '<div class="keyword">%s<b><a class="anchor" name="%s"></a><a href="#%s">%s</a></b>%s%s</div>' % (prefix, keyword, quote("%s-%s" % (pctxt.details["chapter"], keyword)), keyword, parameters, suffix)
- pctxt.next()
- pctxt.stop = True
- elif line.startswith("/*"):
- # Skip comments in the documentation
- while not pctxt.get_line().endswith("*/"):
- pctxt.next()
- pctxt.next()
- else:
- # This is probably not a keyword but a text, ignore it
- res += line
- else:
- res += line
-
- return res
-
- # Used to colorize keywords parameters
- # TODO : use CSS styling
- def colorize(self, text):
- colorized = ""
- tags = [
- [ "[" , "]" , "#008" ],
- [ "{" , "}" , "#800" ],
- [ "<", ">", "#080" ],
- ]
- heap = []
- pos = 0
- while pos < len(text):
- substring = text[pos:]
- found = False
- for tag in tags:
- if substring.startswith(tag[0]):
- # Opening tag
- heap.append(tag)
- colorized += '<span style="color: %s">%s' % (tag[2], substring[0:len(tag[0])])
- pos += len(tag[0])
- found = True
- break
- elif substring.startswith(tag[1]):
- # Closing tag
-
- # pop opening tags until the corresponding one is found
- openingTag = False
- while heap and openingTag != tag:
- openingTag = heap.pop()
- if openingTag != tag:
- colorized += '</span>'
- # all intermediate tags are now closed, we can display the tag
- colorized += substring[0:len(tag[1])]
- # and the close it if it was previously opened
- if openingTag == tag:
- colorized += '</span>'
- pos += len(tag[1])
- found = True
- break
- if not found:
- colorized += substring[0]
- pos += 1
- # close all unterminated tags
- while heap:
- tag = heap.pop()
- colorized += '</span>'
-
- return colorized
-
-
+++ /dev/null
-import re
-import parser
-
-class Parser(parser.Parser):
- def parse(self, line):
- pctxt = self.pctxt
-
- result = re.search(r'(See also *:)', line)
- if result:
- label = result.group(0)
-
- desc = re.sub(r'.*See also *:', '', line).strip()
-
- indent = parser.get_indent(line)
-
- # Some descriptions are on multiple lines
- while pctxt.has_more_lines(1) and parser.get_indent(pctxt.get_line(1)) >= indent:
- desc += " " + pctxt.get_line(1).strip()
- pctxt.next()
-
- pctxt.eat_empty_lines()
- pctxt.next()
- pctxt.stop = True
-
- template = pctxt.templates.get_template("parser/seealso.tpl")
- return template.render(
- pctxt=pctxt,
- label=label,
- desc=desc,
- )
-
- return line
+++ /dev/null
-import re
-import sys
-import parser
-
-class Parser(parser.Parser):
- def __init__(self, pctxt):
- parser.Parser.__init__(self, pctxt)
- self.table1Pattern = re.compile(r'^ *(-+\+)+-+')
- self.table2Pattern = re.compile(r'^ *\+(-+\+)+')
-
- def parse(self, line):
- global document, keywords, keywordsCount, chapters, keyword_conflicts
-
- pctxt = self.pctxt
-
- if pctxt.context['headers']['subtitle'] != 'Configuration Manual':
- # Quick exit
- return line
- elif pctxt.details['chapter'] == "4":
- # BUG: the matrix in chapter 4. Proxies is not well displayed, we skip this chapter
- return line
-
- if pctxt.has_more_lines(1):
- nextline = pctxt.get_line(1)
- else:
- nextline = ""
-
- if self.table1Pattern.match(nextline):
- # activate table rendering only for the Configuration Manual
- lineSeparator = nextline
- nbColumns = nextline.count("+") + 1
- extraColumns = 0
- print >> sys.stderr, "Entering table mode (%d columns)" % nbColumns
- table = []
- if line.find("|") != -1:
- row = []
- while pctxt.has_more_lines():
- line = pctxt.get_line()
- if pctxt.has_more_lines(1):
- nextline = pctxt.get_line(1)
- else:
- nextline = ""
- if line == lineSeparator:
- # New row
- table.append(row)
- row = []
- if nextline.find("|") == -1:
- break # End of table
- else:
- # Data
- columns = line.split("|")
- for j in xrange(0, len(columns)):
- try:
- if row[j]:
- row[j] += "<br />"
- row[j] += columns[j].strip()
- except:
- row.append(columns[j].strip())
- pctxt.next()
- else:
- row = []
- headers = nextline
- while pctxt.has_more_lines():
- line = pctxt.get_line()
- if pctxt.has_more_lines(1):
- nextline = pctxt.get_line(1)
- else:
- nextline = ""
-
- if nextline == "":
- if row: table.append(row)
- break # End of table
-
- if (line != lineSeparator) and (line[0] != "-"):
- start = 0
-
- if row and not line.startswith(" "):
- # Row is complete, parse a new one
- table.append(row)
- row = []
-
- tmprow = []
- while start != -1:
- end = headers.find("+", start)
- if end == -1:
- end = len(headers)
-
- realend = end
- if realend == len(headers):
- realend = len(line)
- else:
- while realend < len(line) and line[realend] != " ":
- realend += 1
- end += 1
-
- tmprow.append(line[start:realend])
-
- start = end + 1
- if start >= len(headers):
- start = -1
- for j in xrange(0, nbColumns):
- try:
- row[j] += tmprow[j].strip()
- except:
- row.append(tmprow[j].strip())
-
- deprecated = row[0].endswith("(deprecated)")
- if deprecated:
- row[0] = row[0][: -len("(deprecated)")].rstrip()
-
- nooption = row[1].startswith("(*)")
- if nooption:
- row[1] = row[1][len("(*)"):].strip()
-
- if deprecated or nooption:
- extraColumns = 1
- extra = ""
- if deprecated:
- extra += '<span class="label label-warning">(deprecated)</span>'
- if nooption:
- extra += '<span>(*)</span>'
- row.append(extra)
-
- pctxt.next()
- print >> sys.stderr, "Leaving table mode"
- pctxt.next() # skip useless next line
- pctxt.stop = True
-
- return self.renderTable(table, nbColumns, pctxt.details["toplevel"])
- # elif self.table2Pattern.match(line):
- # return self.parse_table_format2()
- elif line.find("May be used in sections") != -1:
- nextline = pctxt.get_line(1)
- rows = []
- headers = line.split(":")
- rows.append(headers[1].split("|"))
- rows.append(nextline.split("|"))
- table = {
- "rows": rows,
- "title": headers[0]
- }
- pctxt.next(2) # skip this previous table
- pctxt.stop = True
-
- return self.renderTable(table)
-
- return line
-
-
- def parse_table_format2(self):
- pctxt = self.pctxt
-
- linesep = pctxt.get_line()
- rows = []
-
- pctxt.next()
- maxcols = 0
- while pctxt.get_line().strip().startswith("|"):
- row = pctxt.get_line().strip()[1:-1].split("|")
- rows.append(row)
- maxcols = max(maxcols, len(row))
- pctxt.next()
- if pctxt.get_line() == linesep:
- # TODO : find a way to define a special style for next row
- pctxt.next()
- pctxt.stop = True
-
- return self.renderTable(rows, maxcols)
-
- # Render tables detected by the conversion parser
- def renderTable(self, table, maxColumns = 0, toplevel = None):
- pctxt = self.pctxt
- template = pctxt.templates.get_template("parser/table.tpl")
-
- res = ""
-
- title = None
- if isinstance(table, dict):
- title = table["title"]
- table = table["rows"]
-
- if not maxColumns:
- maxColumns = len(table[0])
-
- rows = []
-
- mode = "th"
- headerLine = ""
- hasKeywords = False
- i = 0
- for row in table:
- line = ""
-
- if i == 0:
- row_template = pctxt.templates.get_template("parser/table/header.tpl")
- else:
- row_template = pctxt.templates.get_template("parser/table/row.tpl")
-
- if i > 1 and (i - 1) % 20 == 0 and len(table) > 50:
- # Repeat headers periodically for long tables
- rows.append(headerLine)
-
- j = 0
- cols = []
- for column in row:
- if j >= maxColumns:
- break
-
- tplcol = {}
-
- data = column.strip()
- keyword = column
- if j == 0 and i == 0 and keyword == 'keyword':
- hasKeywords = True
- if j == 0 and i != 0 and hasKeywords:
- if keyword.startswith("[no] "):
- keyword = keyword[len("[no] "):]
- tplcol['toplevel'] = toplevel
- tplcol['keyword'] = keyword
- tplcol['extra'] = []
- if j == 0 and len(row) > maxColumns:
- for k in xrange(maxColumns, len(row)):
- tplcol['extra'].append(row[k])
- tplcol['data'] = data
- cols.append(tplcol)
- j += 1
- mode = "td"
-
- line = row_template.render(
- pctxt=pctxt,
- columns=cols
- ).strip()
- if i == 0:
- headerLine = line
-
- rows.append(line)
-
- i += 1
-
- return template.render(
- pctxt=pctxt,
- title=title,
- rows=rows,
- )
+++ /dev/null
-import parser
-
-class Parser(parser.Parser):
- # Detect underlines
- def parse(self, line):
- pctxt = self.pctxt
- if pctxt.has_more_lines(1):
- nextline = pctxt.get_line(1)
- if (len(line) > 0) and (len(nextline) > 0) and (nextline[0] == '-') and ("-" * len(line) == nextline):
- template = pctxt.templates.get_template("parser/underline.tpl")
- line = template.render(pctxt=pctxt, data=line).strip()
- pctxt.next(2)
- pctxt.eat_empty_lines()
- pctxt.stop = True
-
- return line
+++ /dev/null
-<div class="separator">
-<span class="label label-info">${label}</span>\
-% if desc:
- ${desc}
-% endif
-% if content:
-<pre class="prettyprint arguments">${"\n".join(content)}</pre>
-% endif
-</div>
+++ /dev/null
-<div class="separator">
-<span class="label label-success">${label}</span>
-<pre class="prettyprint">
-% if desc:
-<div class="example-desc">${desc}</div>\
-% endif
-<code>\
-% for line in content:
-${line}
-% endfor
-</code></pre>
-</div>
\ No newline at end of file
+++ /dev/null
-<span class="comment">\1</span>
\ No newline at end of file
+++ /dev/null
-<div class="page-header"><b>${label}</b> ${desc}</div>
+++ /dev/null
-% if title:
-<div><p>${title} :</p>\
-% endif
-<table class="table table-bordered" border="0" cellspacing="0" cellpadding="0">
-% for row in rows:
-${row}
-% endfor
-</table>\
-% if title:
-</div>
-% endif
\ No newline at end of file
+++ /dev/null
-<thead><tr>\
-% for col in columns:
-<% data = col['data'] %>\
-<th>${data}</th>\
-% endfor
-</tr></thead>
+++ /dev/null
-<% from urllib import quote %>
-<% base = pctxt.context['base'] %>
-<tr>\
-% for col in columns:
-<% data = col['data'] %>\
-<%
- if data in ['yes']:
- style = "class=\"alert-success pagination-centered\""
- data = 'yes<br /><img src="%scss/check.png" alt="yes" title="yes" />' % base
- elif data in ['no']:
- style = "class=\"alert-error pagination-centered\""
- data = 'no<br /><img src="%scss/cross.png" alt="no" title="no" />' % base
- elif data in ['X']:
- style = "class=\"pagination-centered\""
- data = '<img src="%scss/check.png" alt="X" title="yes" />' % base
- elif data in ['-']:
- style = "class=\"pagination-centered\""
- data = ' '
- elif data in ['*']:
- style = "class=\"pagination-centered\""
- else:
- style = None
-%>\
-<td ${style}>\
-% if "keyword" in col:
-<a href="#${quote("%s-%s" % (col['toplevel'], col['keyword']))}">\
-% for extra in col['extra']:
-<span class="pull-right">${extra}</span>\
-% endfor
-${data}</a>\
-% else:
-${data}\
-% endif
-</td>\
-% endfor
-</tr>
+++ /dev/null
-<h5>${data}</h5>
+++ /dev/null
-<a class="anchor" id="summary" name="summary"></a>
-<div class="page-header">
- <h1 id="chapter-summary" data-target="summary">Summary</h1>
-</div>
-<div class="row">
- <div class="col-md-6">
- <% previousLevel = None %>
- % for k in chapterIndexes:
- <% chapter = chapters[k] %>
- % if chapter['title']:
- <%
- if chapter['level'] == 1:
- otag = "<b>"
- etag = "</b>"
- else:
- otag = etag = ""
- %>
- % if chapter['chapter'] == '7':
- ## Quick and dirty hack to split the summary in 2 columns
- ## TODO : implement a generic way split the summary
- </div><div class="col-md-6">
- <% previousLevel = None %>
- % endif
- % if otag and previousLevel:
- <br />
- % endif
- <div class="row">
- <div class="col-md-2 pagination-right noheight">${otag}<small>${chapter['chapter']}.</small>${etag}</div>
- <div class="col-md-10 noheight">
- % for tab in range(1, chapter['level']):
- <div class="tab">
- % endfor
- <a href="#${chapter['chapter']}">${otag}${chapter['title']}${etag}</a>
- % for tab in range(1, chapter['level']):
- </div>
- % endfor
- </div>
- </div>
- <% previousLevel = chapter['level'] %>
- % endif
- % endfor
- </div>
-</div>
+++ /dev/null
-<!DOCTYPE html>
-<html lang="en">
- <head>
- <meta charset="utf-8" />
- <title>${headers['title']} ${headers['version']} - ${headers['subtitle']}</title>
- <link href="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet" />
- <link href="${base}css/page.css?${version}" rel="stylesheet" />
- </head>
- <body>
- <nav class="navbar navbar-default navbar-fixed-top" role="navigation">
- <div class="navbar-header">
- <button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#menu">
- <span class="sr-only">Toggle navigation</span>
- <span class="icon-bar"></span>
- <span class="icon-bar"></span>
- <span class="icon-bar"></span>
- </button>
- <a class="navbar-brand" href="${base}index.html">${headers['title']} <small>${headers['subtitle']}</small></a>
- </div>
- <!-- /.navbar-header -->
-
- <!-- Collect the nav links, forms, and other content for toggling -->
- <div class="collapse navbar-collapse" id="menu">
- <ul class="nav navbar-nav">
- <li><a href="http://www.haproxy.org/">HAProxy home page</a></li>
- <li class="dropdown">
- <a href="#" class="dropdown-toggle" data-toggle="dropdown">Versions <b class="caret"></b></a>
- <ul class="dropdown-menu">
- ## TODO : provide a structure to dynamically generate per version links
- <li class="dropdown-header">HAProxy 1.4</li>
- <li><a href="${base}configuration-1.4.html">Configuration Manual <small>(stable)</small></a></li>
- <li><a href="${base}snapshot/configuration-1.4.html">Configuration Manual <small>(snapshot)</small></a></li>
- <li><a href="http://git.1wt.eu/git/haproxy-1.4.git/">GIT Repository</a></li>
- <li><a href="http://www.haproxy.org/git/?p=haproxy-1.4.git">Browse repository</a></li>
- <li><a href="http://www.haproxy.org/download/1.4/">Browse directory</a></li>
- <li class="divider"></li>
- <li class="dropdown-header">HAProxy 1.5</li>
- <li><a href="${base}configuration-1.5.html">Configuration Manual <small>(stable)</small></a></li>
- <li><a href="${base}snapshot/configuration-1.5.html">Configuration Manual <small>(snapshot)</small></a></li>
- <li><a href="http://git.1wt.eu/git/haproxy-1.5.git/">GIT Repository</a></li>
- <li><a href="http://www.haproxy.org/git/?p=haproxy-1.5.git">Browse repository</a></li>
- <li><a href="http://www.haproxy.org/download/1.5/">Browse directory</a></li>
- <li class="divider"></li>
- <li class="dropdown-header">HAProxy 1.6</li>
- <li><a href="${base}configuration-1.6.html">Configuration Manual <small>(stable)</small></a></li>
- <li><a href="${base}snapshot/configuration-1.6.html">Configuration Manual <small>(snapshot)</small></a></li>
- <li><a href="${base}intro-1.6.html">Starter Guide <small>(stable)</small></a></li>
- <li><a href="${base}snapshot/intro-1.6.html">Starter Guide <small>(snapshot)</small></a></li>
- <li><a href="http://git.1wt.eu/git/haproxy.git/">GIT Repository</a></li>
- <li><a href="http://www.haproxy.org/git/?p=haproxy.git">Browse repository</a></li>
- <li><a href="http://www.haproxy.org/download/1.6/">Browse directory</a></li>
- </ul>
- </li>
- </ul>
- </div>
- </nav>
- <!-- /.navbar-static-side -->
-
- <div id="wrapper">
-
- <div id="sidebar">
- <form onsubmit="search(this.keyword.value); return false" role="form">
- <div id="searchKeyword" class="form-group">
- <input type="text" class="form-control typeahead" id="keyword" name="keyword" placeholder="Search..." autocomplete="off">
- </div>
- </form>
- <p>
- Keyboard navigation : <span id="keyboardNavStatus"></span>
- </p>
- <p>
- When enabled, you can use <strong>left</strong> and <strong>right</strong> arrow keys to navigate between chapters.<br>
- The feature is automatically disabled when the search field is focused.
- </p>
- <p class="text-right">
- <small>Converted with <a href="https://github.com/cbonte/haproxy-dconv">haproxy-dconv</a> v<b>${version}</b> on <b>${date}</b></small>
- </p>
- </div>
- <!-- /.sidebar -->
-
- <div id="page-wrapper">
- <div class="row">
- <div class="col-lg-12">
- <div class="text-center">
- <h1>${headers['title']}</h1>
- <h2>${headers['subtitle']}</h2>
- <p><strong>${headers['version']}</strong></p>
- <p>
- <a href="http://www.haproxy.org/" title="HAProxy Home Page"><img src="${base}img/logo-med.png" /></a><br>
- ${headers['author']}<br>
- ${headers['date']}
- </p>
- </div>
-
- ${document}
- <br>
- <hr>
- <div class="text-right">
- ${headers['title']} ${headers['version'].replace("version ", "")} – ${headers['subtitle']}<br>
- <small>${headers['date']}, ${headers['author']}</small>
- </div>
- </div>
- <!-- /.col-lg-12 -->
- </div>
- <!-- /.row -->
- <div style="position: fixed; z-index: 1000; bottom: 0; left: 0; right: 0; padding: 10px">
- <ul class="pager" style="margin: 0">
- <li class="previous"><a id="previous" href="#"></a></li>
- <li class="next"><a id="next" href="#"></a></li>
- </ul>
- </div>
- </div>
- <!-- /#page-wrapper -->
-
- </div>
- <!-- /#wrapper -->
-
- <script src="//cdnjs.cloudflare.com/ajax/libs/jquery/1.11.0/jquery.min.js"></script>
- <script src="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.1.1/js/bootstrap.min.js"></script>
- <script src="//cdnjs.cloudflare.com/ajax/libs/typeahead.js/0.11.1/typeahead.bundle.min.js"></script>
- <script>
- /* Keyword search */
- var searchFocus = false
- var keywords = [
- "${'",\n\t\t\t\t"'.join(keywords)}"
- ]
-
- function updateKeyboardNavStatus() {
- var status = searchFocus ? '<span class="label label-disabled">Disabled</span>' : '<span class="label label-success">Enabled</span>'
- $('#keyboardNavStatus').html(status)
- }
-
- function search(keyword) {
- if (keyword && !!~$.inArray(keyword, keywords)) {
- window.location.hash = keyword
- }
- }
- // constructs the suggestion engine
- var kwbh = new Bloodhound({
- datumTokenizer: Bloodhound.tokenizers.obj.whitespace('value'),
- queryTokenizer: Bloodhound.tokenizers.whitespace,
- local: $.map(keywords, function(keyword) { return { value: keyword }; })
- });
- kwbh.initialize()
-
- $('#searchKeyword .typeahead').typeahead({
- hint: true,
- highlight: true,
- minLength: 1,
- autoselect: true
- },
- {
- name: 'keywords',
- displayKey: 'value',
- limit: keywords.length,
- source: kwbh.ttAdapter()
- }).focus(function() {
- searchFocus = true
- updateKeyboardNavStatus()
- }).blur(function() {
- searchFocus = false
- updateKeyboardNavStatus()
- }).bind('typeahead:selected', function ($e, datum) {
- search(datum.value)
- })
-
- /* EXPERIMENTAL - Previous/Next navigation */
- var headings = $(":header")
- var previousTarget = false
- var nextTarget = false
- var $previous = $('#previous')
- var $next = $('#next')
- function refreshNavigation() {
- var previous = false
- var next = false
- $.each(headings, function(item, value) {
- var el = $(value)
-
- // TODO : avoid target recalculation on each refresh
- var target = el.attr('data-target')
- if (! target) return true
-
- var target_el = $('#' + target.replace(/\./, "\\."))
- if (! target_el.attr('id')) return true
-
- if (target_el.offset().top < $(window).scrollTop()) {
- previous = el
- }
- if (target_el.offset().top - 1 > $(window).scrollTop()) {
- next = el
- }
- if (next) return false
- })
-
- previousTarget = previous ? previous.attr('data-target') : 'top'
- $previous.html(
- previous && previousTarget ?
- '<span class="glyphicon glyphicon-arrow-left"></span> ' + previous.text() :
- '<span class="glyphicon glyphicon-arrow-up"></span> Top'
- ).attr('href', '#' + previousTarget)
-
- nextTarget = next ? next.attr('data-target') : 'bottom'
- $next.html(
- next && nextTarget ?
- next.text() + ' <span class="glyphicon glyphicon-arrow-right"></span>' :
- 'Bottom <span class="glyphicon glyphicon-arrow-down"></span>'
- ).attr('href', '#' + nextTarget)
- }
-
- $(window).scroll(function () {
- refreshNavigation()
- });
- $(document).ready(function() {
- refreshNavigation()
- updateKeyboardNavStatus()
- });
-
- /* EXPERIMENTAL - Enable keyboard navigation */
- $(document).keydown(function(e){
- if (searchFocus) return
-
- switch(e.which) {
- case 37: // left
- window.location.hash = previousTarget ? previousTarget : 'top'
- break
-
- case 39: // right
- window.location.hash = nextTarget ? nextTarget : 'bottom'
- break
-
- default: return // exit this handler for other keys
- }
- e.preventDefault()
- })
- </script>
- ${footer}
- <a class="anchor" name="bottom"></a>
- </body>
-</html>
+++ /dev/null
-#!/bin/bash
-
-PROJECT_HOME=$(dirname $(readlink -f $0))
-cd $PROJECT_HOME || exit 1
-
-WORK_DIR=$PROJECT_HOME/work
-
-function on_exit()
-{
- echo "-- END $(date)"
-}
-
-function init()
-{
- trap on_exit EXIT
-
- echo
- echo "-- START $(date)"
- echo "PROJECT_HOME = $PROJECT_HOME"
-
- echo "Preparing work directories..."
- mkdir -p $WORK_DIR || exit 1
- mkdir -p $WORK_DIR/haproxy || exit 1
- mkdir -p $WORK_DIR/haproxy-dconv || exit 1
-
- UPDATED=0
- PUSH=0
-
-}
-
-# Needed as "git -C" is only available since git 1.8.5
-function git-C()
-{
- _gitpath=$1
- shift
- echo "git --git-dir=$_gitpath/.git --work-tree=$_gitpath $@" >&2
- git --git-dir=$_gitpath/.git --work-tree=$_gitpath "$@"
-}
-
-function fetch_haproxy_dconv()
-{
- echo "Fetching latest haproxy-dconv public version..."
- if [ ! -e $WORK_DIR/haproxy-dconv/master ];
- then
- git clone -v git://github.com/cbonte/haproxy-dconv.git $WORK_DIR/haproxy-dconv/master || exit 1
- fi
- GIT="git-C $WORK_DIR/haproxy-dconv/master"
-
- OLD_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)"
- $GIT checkout master && $GIT pull -v
- version=$($GIT describe --tags)
- version=${version%-g*}
- NEW_MD5="$($GIT log -1 | md5sum) $($GIT describe --tags)"
- if [ "$OLD_MD5" != "$NEW_MD5" ];
- then
- UPDATED=1
- fi
-
- echo "Fetching last haproxy-dconv public pages version..."
- if [ ! -e $WORK_DIR/haproxy-dconv/gh-pages ];
- then
- cp -a $WORK_DIR/haproxy-dconv/master $WORK_DIR/haproxy-dconv/gh-pages || exit 1
- fi
- GIT="git-C $WORK_DIR/haproxy-dconv/gh-pages"
-
- $GIT checkout gh-pages && $GIT pull -v
-}
-
-function fetch_haproxy()
-{
- url=$1
- path=$2
-
- echo "Fetching HAProxy 1.4 repository..."
- if [ ! -e $path ];
- then
- git clone -v $url $path || exit 1
- fi
- GIT="git-C $path"
-
- $GIT checkout master && $GIT pull -v
-}
-
-function _generate_file()
-{
- infile=$1
- destfile=$2
- git_version=$3
- state=$4
-
- $GIT checkout $git_version
-
- if [ -e $gitpath/doc/$infile ];
- then
-
- git_version_simple=${git_version%-g*}
- doc_version=$(tail -n1 $destfile 2>/dev/null | grep " git:" | sed 's/.* git:\([^ ]*\).*/\1/')
- if [ $UPDATED -eq 1 -o "$git_version" != "$doc_version" ];
- then
- HTAG="VERSION-$(basename $gitpath | sed 's/[.]/\\&/g')"
- if [ "$state" == "snapshot" ];
- then
- base=".."
- HTAG="$HTAG-SNAPSHOT"
- else
- base="."
- fi
-
-
- $WORK_DIR/haproxy-dconv/master/haproxy-dconv.py -i $gitpath/doc/$infile -o $destfile --base=$base &&
- echo "<!-- git:$git_version -->" >> $destfile &&
- sed -i "s/\(<\!-- $HTAG -->\)\(.*\)\(<\!-- \/$HTAG -->\)/\1${git_version_simple}\3/" $docroot/index.html
-
- else
- echo "Already up to date."
- fi
-
- if [ "$doc_version" != "" -a "$git_version" != "$doc_version" ];
- then
- changelog=$($GIT log --oneline $doc_version..$git_version $gitpath/doc/$infile)
- else
- changelog=""
- fi
-
- GITDOC="git-C $docroot"
- if [ "$($GITDOC status -s $destfile)" != "" ];
- then
- $GITDOC add $destfile &&
- $GITDOC commit -m "Updating HAProxy $state $infile ${git_version_simple} generated by haproxy-dconv $version" -m "$changelog" $destfile $docroot/index.html &&
- PUSH=1
- fi
- fi
-}
-
-function generate_docs()
-{
- url=$1
- gitpath=$2
- docroot=$3
- infile=$4
- outfile=$5
-
- fetch_haproxy $url $gitpath
-
- GIT="git-C $gitpath"
-
- $GIT checkout master
- git_version=$($GIT describe --tags --match 'v*')
- git_version_stable=${git_version%-*-g*}
-
- echo "Generating snapshot version $git_version..."
- _generate_file $infile $docroot/snapshot/$outfile $git_version snapshot
-
- echo "Generating stable version $git_version..."
- _generate_file $infile $docroot/$outfile $git_version_stable stable
-}
-
-function push()
-{
- docroot=$1
- GITDOC="git-C $docroot"
-
- if [ $PUSH -eq 1 ];
- then
- $GITDOC push origin gh-pages
- fi
-
-}
-
-
-init
-fetch_haproxy_dconv
-generate_docs http://git.1wt.eu/git/haproxy-1.4.git/ $WORK_DIR/haproxy/1.4 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.4.html
-generate_docs http://git.1wt.eu/git/haproxy-1.5.git/ $WORK_DIR/haproxy/1.5 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.5.html
-generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages configuration.txt configuration-1.6.html
-generate_docs http://git.1wt.eu/git/haproxy.git/ $WORK_DIR/haproxy/1.6 $WORK_DIR/haproxy-dconv/gh-pages intro.txt intro-1.6.html
-push $WORK_DIR/haproxy-dconv/gh-pages
+++ /dev/null
-[DEFAULT]
-pristine-tar = True
-upstream-branch = upstream-1.6
-debian-branch = master
+++ /dev/null
-.TH HALOG "1" "July 2013" "halog" "User Commands"
-.SH NAME
-halog \- HAProxy log statistics reporter
-.SH SYNOPSIS
-.B halog
-[\fI-h|--help\fR]
-.br
-.B halog
-[\fIoptions\fR] <LOGFILE
-.SH DESCRIPTION
-.B halog
-reads HAProxy log data from stdin and extracts and displays lines matching
-user-specified criteria.
-.SH OPTIONS
-.SS Input filters \fR(several filters may be combined)
-.TP
-\fB\-H\fR
-Only match lines containing HTTP logs (ignore TCP)
-.TP
-\fB\-E\fR
-Only match lines without any error (no 5xx status)
-.TP
-\fB\-e\fR
-Only match lines with errors (status 5xx or negative)
-.TP
-\fB\-rt\fR|\fB\-RT\fR <time>
-Only match response times larger|smaller than <time>
-.TP
-\fB\-Q\fR|\fB\-QS\fR
-Only match queued requests (any queue|server queue)
-.TP
-\fB\-tcn\fR|\fB\-TCN\fR <code>
-Only match requests with/without termination code <code>
-.TP
-\fB\-hs\fR|\fB\-HS\fR <[min][:][max]>
-Only match requests with HTTP status codes within/not within min..max. Any of
-them may be omitted. Exact code is checked for if no ':' is specified.
-.SS
-Modifiers
-.TP
-\fB\-v\fR
-Invert the input filtering condition
-.TP
-\fB\-q\fR
-Don't report errors/warnings
-.TP
-\fB\-m\fR <lines>
-Limit output to the first <lines> lines
-.SS
-Output filters \fR\- only one may be used at a time
-.TP
-\fB\-c\fR
-Only report the number of lines that would have been printed
-.TP
-\fB\-pct\fR
-Output connect and response times percentiles
-.TP
-\fB\-st\fR
-Output number of requests per HTTP status code
-.TP
-\fB\-cc\fR
-Output number of requests per cookie code (2 chars)
-.TP
-\fB\-tc\fR
-Output number of requests per termination code (2 chars)
-.TP
-\fB\-srv\fR
-Output statistics per server (time, requests, errors)
-.TP
-\fB\-u\fR*
-Output statistics per URL (time, requests, errors)
-.br
-Additional characters indicate the output sorting key:
-.RS
-.TP
-\fB\-u\fR
-URL
-.TP
-\fB\-uc\fR
-Request count
-.TP
-\fB\-ue\fR
-Error count
-.TP
-\fB\-ua\fR
-Average response time
-.TP
-\fB\-ut\fR
-Average total time
-.TP
-\fB\-uao\fR, \fB\-uto\fR
-Average times computed on valid ('OK') requests
-.TP
-\fB\-uba\fR
-Average bytes returned
-.TP
-\fB\-ubt\fR
-Total bytes returned
-.RE
-.SH "SEE ALSO"
-.BR haproxy (1)
-.SH AUTHOR
-.PP
-\fBhalog\fR was written by Willy Tarreau <w@1wt.eu> and is part of \fBhaproxy\fR(1).
-.PP
-This manual page was written by Apollon Oikonomopoulos <apoikos@gmail.com> for the Debian project (but may
-be used by others).
-
+++ /dev/null
-Syslog support
---------------
-Upstream recommends using syslog over UDP to log from HAProxy processes, as
-this allows seamless logging from chroot'ed processes without access to
-/dev/log. However, many syslog implementations do not enable UDP syslog by
-default.
-
-The default HAProxy configuration in Debian uses /dev/log for logging and
-ships an rsyslog snippet that creates /dev/log in HAProxy's chroot and logs all
-HAProxy messages to /var/log/haproxy.log. To take advantage of this, you must
-restart rsyslog after installing this package. For other syslog daemons you
-will have to take manual measures to enable UDP logging or create /dev/log
-under HAProxy's chroot:
-a. For sysklogd, add SYSLOG="-a /var/lib/haproxy/dev/log" to
- /etc/default/syslog.
-b. For inetutils-syslogd, add SYSLOGD_OPTS="-a /var/lib/haproxy/dev/log" to
- /etc/default/inetutils-syslogd.
+++ /dev/null
-global
- log /dev/log local0
- log /dev/log local1 notice
- chroot /var/lib/haproxy
- stats socket /run/haproxy/admin.sock mode 660 level admin
- stats timeout 30s
- user haproxy
- group haproxy
- daemon
-
- # Default SSL material locations
- ca-base /etc/ssl/certs
- crt-base /etc/ssl/private
-
- # Default ciphers to use on SSL-enabled listening sockets.
- # For more information, see ciphers(1SSL). This list is from:
- # https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
- ssl-default-bind-ciphers ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS
- ssl-default-bind-options no-sslv3
-
-defaults
- log global
- mode http
- option httplog
- option dontlognull
- timeout connect 5000
- timeout client 50000
- timeout server 50000
- errorfile 400 /etc/haproxy/errors/400.http
- errorfile 403 /etc/haproxy/errors/403.http
- errorfile 408 /etc/haproxy/errors/408.http
- errorfile 500 /etc/haproxy/errors/500.http
- errorfile 502 /etc/haproxy/errors/502.http
- errorfile 503 /etc/haproxy/errors/503.http
- errorfile 504 /etc/haproxy/errors/504.http
+++ /dev/null
-# Defaults file for HAProxy
-#
-# This is sourced by both, the initscript and the systemd unit file, so do not
-# treat it as a shell script fragment.
-
-# Change the config file location if needed
-#CONFIG="/etc/haproxy/haproxy.cfg"
-
-# Add extra flags here, see haproxy(1) for a few options
-#EXTRAOPTS="-de -m 16"
+++ /dev/null
-etc/haproxy
-etc/haproxy/errors
-var/lib/haproxy
-var/lib/haproxy/dev
+++ /dev/null
-doc/architecture.txt
-doc/configuration.txt
-contrib
-README
+++ /dev/null
-examples/*.cfg
+++ /dev/null
-#!/bin/sh
-### BEGIN INIT INFO
-# Provides: haproxy
-# Required-Start: $local_fs $network $remote_fs $syslog $named
-# Required-Stop: $local_fs $remote_fs $syslog $named
-# Default-Start: 2 3 4 5
-# Default-Stop: 0 1 6
-# Short-Description: fast and reliable load balancing reverse proxy
-# Description: This file should be used to start and stop haproxy.
-### END INIT INFO
-
-# Author: Arnaud Cornet <acornet@debian.org>
-
-PATH=/sbin:/usr/sbin:/bin:/usr/bin
-PIDFILE=/var/run/haproxy.pid
-CONFIG=/etc/haproxy/haproxy.cfg
-HAPROXY=/usr/sbin/haproxy
-RUNDIR=/run/haproxy
-EXTRAOPTS=
-
-test -x $HAPROXY || exit 0
-
-if [ -e /etc/default/haproxy ]; then
- . /etc/default/haproxy
-fi
-
-test -f "$CONFIG" || exit 0
-
-[ -f /etc/default/rcS ] && . /etc/default/rcS
-. /lib/lsb/init-functions
-
-
-check_haproxy_config()
-{
- $HAPROXY -c -f "$CONFIG" >/dev/null
- if [ $? -eq 1 ]; then
- log_end_msg 1
- exit 1
- fi
-}
-
-haproxy_start()
-{
- [ -d "$RUNDIR" ] || mkdir "$RUNDIR"
- chown haproxy:haproxy "$RUNDIR"
- chmod 2775 "$RUNDIR"
-
- check_haproxy_config
-
- start-stop-daemon --quiet --oknodo --start --pidfile "$PIDFILE" \
- --exec $HAPROXY -- -f "$CONFIG" -D -p "$PIDFILE" \
- $EXTRAOPTS || return 2
- return 0
-}
-
-haproxy_stop()
-{
- if [ ! -f $PIDFILE ] ; then
- # This is a success according to LSB
- return 0
- fi
-
- ret=0
- tmppid="$(mktemp)"
-
- # HAProxy's pidfile may contain multiple PIDs, if nbproc > 1, so loop
- # over each PID. Note that start-stop-daemon has a --pid option, but it
- # was introduced in dpkg 1.17.6, post wheezy, so we use a temporary
- # pidfile instead to ease backports.
- for pid in $(cat $PIDFILE); do
- echo "$pid" > "$tmppid"
- start-stop-daemon --quiet --oknodo --stop \
- --retry 5 --pidfile "$tmppid" --exec $HAPROXY || ret=$?
- done
-
- rm -f "$tmppid"
- [ $ret -eq 0 ] && rm -f $PIDFILE
-
- return $ret
-}
-
-haproxy_reload()
-{
- check_haproxy_config
-
- $HAPROXY -f "$CONFIG" -p $PIDFILE -D $EXTRAOPTS -sf $(cat $PIDFILE) \
- || return 2
- return 0
-}
-
-haproxy_status()
-{
- if [ ! -f $PIDFILE ] ; then
- # program not running
- return 3
- fi
-
- for pid in $(cat $PIDFILE) ; do
- if ! ps --no-headers p "$pid" | grep haproxy > /dev/null ; then
- # program running, bogus pidfile
- return 1
- fi
- done
-
- return 0
-}
-
-
-case "$1" in
-start)
- log_daemon_msg "Starting haproxy" "haproxy"
- haproxy_start
- ret=$?
- case "$ret" in
- 0)
- log_end_msg 0
- ;;
- 1)
- log_end_msg 1
- echo "pid file '$PIDFILE' found, haproxy not started."
- ;;
- 2)
- log_end_msg 1
- ;;
- esac
- exit $ret
- ;;
-stop)
- log_daemon_msg "Stopping haproxy" "haproxy"
- haproxy_stop
- ret=$?
- case "$ret" in
- 0|1)
- log_end_msg 0
- ;;
- 2)
- log_end_msg 1
- ;;
- esac
- exit $ret
- ;;
-reload|force-reload)
- log_daemon_msg "Reloading haproxy" "haproxy"
- haproxy_reload
- ret=$?
- case "$ret" in
- 0|1)
- log_end_msg 0
- ;;
- 2)
- log_end_msg 1
- ;;
- esac
- exit $ret
- ;;
-restart)
- log_daemon_msg "Restarting haproxy" "haproxy"
- haproxy_stop
- haproxy_start
- ret=$?
- case "$ret" in
- 0)
- log_end_msg 0
- ;;
- 1)
- log_end_msg 1
- ;;
- 2)
- log_end_msg 1
- ;;
- esac
- exit $ret
- ;;
-status)
- haproxy_status
- ret=$?
- case "$ret" in
- 0)
- echo "haproxy is running."
- ;;
- 1)
- echo "haproxy dead, but $PIDFILE exists."
- ;;
- *)
- echo "haproxy not running."
- ;;
- esac
- exit $ret
- ;;
-*)
- echo "Usage: /etc/init.d/haproxy {start|stop|reload|restart|status}"
- exit 2
- ;;
-esac
-
-:
+++ /dev/null
-debian/haproxy.cfg etc/haproxy
-examples/errorfiles/*.http etc/haproxy/errors
-contrib/systemd/haproxy.service lib/systemd/system
-contrib/halog/halog usr/bin
+++ /dev/null
-haproxy binary: binary-without-manpage usr/sbin/haproxy-systemd-wrapper
+++ /dev/null
-mv_conffile /etc/rsyslog.d/haproxy.conf /etc/rsyslog.d/49-haproxy.conf 1.5.3-2~
+++ /dev/null
-doc/haproxy.1
-doc/lua-api/_build/man/haproxy-lua.1
-debian/halog.1
+++ /dev/null
-#!/bin/sh
-
-set -e
-
-adduser --system --disabled-password --disabled-login --home /var/lib/haproxy \
- --no-create-home --quiet --force-badname --group haproxy
-
-#DEBHELPER#
-
-if [ -n "$2" ] && dpkg --compare-versions "$2" gt "1.5~dev24-2~"; then
- # Reload already running instances. Since 1.5~dev24-2 we do not stop
- # haproxy in prerm during upgrades.
- invoke-rc.d haproxy reload || true
-fi
-
-exit 0
+++ /dev/null
-#!/bin/sh
-
-set -e
-
-#DEBHELPER#
-
-case "$1" in
- purge)
- deluser --system haproxy || true
- delgroup --system haproxy || true
- ;;
- *)
- ;;
-esac
-
-exit 0
+++ /dev/null
-d /run/haproxy 2775 haproxy haproxy -
+++ /dev/null
-" detect HAProxy configuration
-au BufRead,BufNewFile haproxy*.cfg set filetype=haproxy
+++ /dev/null
-/var/log/haproxy.log {
- daily
- rotate 52
- missingok
- notifempty
- compress
- delaycompress
- postrotate
- invoke-rc.d rsyslog rotate >/dev/null 2>&1 || true
- endscript
-}
+++ /dev/null
-From ca3fa95fbb1cc4060dcdd785cd76b1fa82c13b4a Mon Sep 17 00:00:00 2001
-From: Sergii Golovatiuk <sgolovatiuk@mirantis.com>
-Date: Tue, 24 May 2016 13:54:12 +0000
-Subject: [PATCH] Adding "include" configuration statement to haproxy.
-MIME-Version: 1.0
-Content-Type: text/plain; charset=UTF-8
-Content-Transfer-Encoding: 8bit
-
-This patch ia based on original work done by Brane F. Gračnar:
-http://marc.info/?l=haproxy&m=129235503410444
-
-Original patch was modified according to upstream changes in 1.6.*
----
- include/common/cfgparse.h | 6 +-
- src/cfgparse.c | 159 +++++++++++++++++++++++++++++++++++++++++++++-
- src/haproxy.c | 2 +-
- 3 files changed, 162 insertions(+), 5 deletions(-)
-
-diff --git a/include/common/cfgparse.h b/include/common/cfgparse.h
-index d785327..b521302 100644
---- a/include/common/cfgparse.h
-+++ b/include/common/cfgparse.h
-@@ -36,6 +36,10 @@
- #define CFG_USERLIST 3
- #define CFG_PEERS 4
-
-+
-+/* maximum include recursion level */
-+#define INCLUDE_RECURSION_LEVEL_MAX 10
-+
- struct cfg_keyword {
- int section; /* section type for this keyword */
- const char *kw; /* the keyword itself */
-@@ -65,7 +69,7 @@ extern int cfg_maxconn;
-
- int cfg_parse_global(const char *file, int linenum, char **args, int inv);
- int cfg_parse_listen(const char *file, int linenum, char **args, int inv);
--int readcfgfile(const char *file);
-+int readcfgfile(const char *file, int recdepth);
- void cfg_register_keywords(struct cfg_kw_list *kwl);
- void cfg_unregister_keywords(struct cfg_kw_list *kwl);
- void init_default_instance();
-diff --git a/src/cfgparse.c b/src/cfgparse.c
-index 97f4243..99a19e5 100644
---- a/src/cfgparse.c
-+++ b/src/cfgparse.c
-@@ -32,6 +32,8 @@
- #include <sys/stat.h>
- #include <fcntl.h>
- #include <unistd.h>
-+#include <glob.h>
-+#include <libgen.h>
-
- #include <common/cfgparse.h>
- #include <common/chunk.h>
-@@ -6844,6 +6846,149 @@ out:
- return err_code;
- }
-
-+/**
-+ * This function takes glob(3) pattern and tries to resolve
-+ * that pattern to files and tries to include them.
-+ *
-+ * See readcfgfile() for return values.
-+ */
-+int cfgfile_include (char *pattern, char *dir, int recdepth) {
-+
-+ int err_code = 0;
-+
-+ if (pattern == NULL) {
-+ Alert("Config file include pattern == NULL; This should never happen.\n");
-+ err_code |= ERR_ABORT;
-+ goto out;
-+ }
-+ if (recdepth >= INCLUDE_RECURSION_LEVEL_MAX) {
-+ Alert(
-+ "Refusing to include filename pattern: '%s': too deep recursion level: %d.\n",
-+ pattern,
-+ recdepth
-+ );
-+ err_code|= ERR_ABORT;
-+ goto out;
-+ }
-+
-+ /** don't waste time with empty strings */
-+ if (strlen(pattern) < 1) return 0;
-+
-+ /** we want to support relative to include file glob patterns */
-+ int buf_len = 3;
-+ if (dir != NULL)
-+ buf_len += strlen(dir);
-+ buf_len += strlen(pattern);
-+ char *real_pattern = malloc(buf_len);
-+ if (real_pattern == NULL) {
-+ Alert("Error allocating memory for glob pattern: %s\n", strerror(errno));
-+ err_code |= ERR_ABORT;
-+ goto out;
-+ }
-+ memset(real_pattern, '\0', buf_len);
-+ if (dir != NULL && pattern[0] != '/') {
-+ strcat(real_pattern, dir);
-+ strcat(real_pattern, "/");
-+ }
-+ strcat(real_pattern, pattern);
-+
-+ /* file inclusion result */
-+ int result = 0;
-+
-+ /** glob the pattern */
-+ glob_t res;
-+ int rv = glob(
-+ real_pattern,
-+ (GLOB_NOESCAPE | GLOB_BRACE | GLOB_TILDE),
-+ NULL,
-+ &res
-+ );
-+ /* check for glob(3) injuries */
-+ switch (rv) {
-+ case GLOB_NOMATCH:
-+ /* nothing was found */
-+ break;
-+
-+ case GLOB_ABORTED:
-+ Alert("Error globbing pattern '%s': read error.\n", real_pattern);
-+ result = ERR_ABORT;
-+ break;
-+
-+ case GLOB_NOSPACE:
-+ Alert("Error globbing pattern '%s': out of memory.\n", real_pattern);
-+ result = ERR_ABORT;
-+ break;
-+
-+ default:
-+ ;
-+ int i = 0;
-+ for (i = 0; i < res.gl_pathc; i++) {
-+ char *file = res.gl_pathv[i];
-+
-+ /* parse configuration fragment */
-+ int r = readcfgfile(file, recdepth);
-+
-+ /* check for injuries */
-+ if (r != 0) {
-+ result = r;
-+ goto outta_cfgfile_include;
-+ }
-+ }
-+ }
-+
-+outta_cfgfile_include:
-+
-+ /** free glob result. */
-+ globfree(&res);
-+ free(real_pattern);
-+
-+ return result;
-+
-+out:
-+ return err_code;
-+}
-+
-+int
-+cfg_parse_include(const char *file, int linenum, char **args, int recdepth) {
-+
-+ int err_code = 0;
-+
-+ if (strcmp(args[0], "include") == 0) {
-+ if (args[1] == NULL || strlen(args[1]) < 1) {
-+ Alert("parsing [%s:%d]: include statement requires file glob pattern.\n",
-+ file, linenum);
-+ err_code |= ERR_ABORT;
-+ goto out;
-+ }
-+ /**
-+ * compute file's dirname - this is necessary because
-+ * dirname(3) returns shared buffer address
-+ */
-+ int buf_len = strlen(file) + 1;
-+ char *file_dir = malloc(buf_len);
-+ if (file_dir == NULL) {
-+ Alert("Unable to allocate memory for config file dirname.");
-+ err_code |= ERR_ABORT;
-+ goto out;
-+ }
-+ memset(file_dir, '\0', buf_len);
-+ strcpy(file_dir, file);
-+ strcpy(file_dir, dirname(file_dir));
-+
-+ /* include pattern */
-+ int r = cfgfile_include(args[1], file_dir, (recdepth + 1));
-+ //int r = cfgfile_include(args[1], file_dir, 1);
-+ free(file_dir);
-+ /* check for injuries */
-+ if (r != 0) {
-+ err_code |= r;
-+ goto out;
-+ }
-+ }
-+out:
-+ return err_code;
-+}
-+
- /*
- * This function reads and parses the configuration file given in the argument.
- * Returns the error code, 0 if OK, or any combination of :
-@@ -6854,7 +6999,7 @@ out:
- * Only the two first ones can stop processing, the two others are just
- * indicators.
- */
--int readcfgfile(const char *file)
-+int readcfgfile(const char *file, int recdepth)
- {
- char *thisline;
- int linesize = LINESIZE;
-@@ -6878,13 +7023,16 @@ int readcfgfile(const char *file)
- !cfg_register_section("global", cfg_parse_global) ||
- !cfg_register_section("userlist", cfg_parse_users) ||
- !cfg_register_section("peers", cfg_parse_peers) ||
-+ !cfg_register_section("include", cfg_parse_include) ||
- !cfg_register_section("mailers", cfg_parse_mailers) ||
- !cfg_register_section("namespace_list", cfg_parse_netns) ||
- !cfg_register_section("resolvers", cfg_parse_resolvers))
- return -1;
-
-- if ((f=fopen(file,"r")) == NULL)
-+ if ((f=fopen(file,"r")) == NULL) {
-+ Alert("Error opening configuration file %s: %s\n", file, strerror(errno));
- return -1;
-+ }
-
- next_line:
- while (fgets(thisline + readbytes, linesize - readbytes, f) != NULL) {
-@@ -7168,7 +7316,12 @@ next_line:
-
- /* else it's a section keyword */
- if (cs)
-- err_code |= cs->section_parser(file, linenum, args, kwm);
-+ if (strcmp("include", cs->section_name) == 0) {
-+ err_code |= cs->section_parser(file, linenum, args, recdepth);
-+ }
-+ else {
-+ err_code |= cs->section_parser(file, linenum, args, kwm);
-+ }
- else {
- Alert("parsing [%s:%d]: unknown keyword '%s' out of section.\n", file, linenum, args[0]);
- err_code |= ERR_ALERT | ERR_FATAL;
-diff --git a/src/haproxy.c b/src/haproxy.c
-index 4299328..63a9bfd 100644
---- a/src/haproxy.c
-+++ b/src/haproxy.c
-@@ -770,7 +770,7 @@ void init(int argc, char **argv)
- list_for_each_entry(wl, &cfg_cfgfiles, list) {
- int ret;
-
-- ret = readcfgfile(wl->s);
-+ ret = readcfgfile(wl->s, 0);
- if (ret == -1) {
- Alert("Could not open configuration file %s : %s\n",
- wl->s, strerror(errno));
---
-2.7.4
-
+++ /dev/null
-From: Apollon Oikonomopoulos <apoikos@debian.org>
-Date: Wed, 29 Apr 2015 13:51:49 +0300
-Subject: [PATCH] dconv: debianize
-
- - Use Debian bootstrap and jquery packages
- - Add Debian-related resources to the template
- - Use the package's version instead of HAProxy's git version
- - Strip the conversion date from the output to ensure reproducible
- build.
-
-diff --git a/debian/dconv/haproxy-dconv.py b/debian/dconv/haproxy-dconv.py
-index fe2b96dce325..702eefac6a3b 100755
---- a/debian/dconv/haproxy-dconv.py
-+++ b/debian/dconv/haproxy-dconv.py
-@@ -44,12 +44,11 @@ VERSION = ""
- HAPROXY_GIT_VERSION = False
-
- def main():
-- global VERSION, HAPROXY_GIT_VERSION
-+ global HAPROXY_GIT_VERSION
-
- usage="Usage: %prog --infile <infile> --outfile <outfile>"
-
- optparser = OptionParser(description='Generate HTML Document from HAProxy configuation.txt',
-- version=VERSION,
- usage=usage)
- optparser.add_option('--infile', '-i', help='Input file mostly the configuration.txt')
- optparser.add_option('--outfile','-o', help='Output file')
-@@ -65,11 +64,7 @@ def main():
-
- os.chdir(os.path.dirname(__file__))
-
-- VERSION = get_git_version()
-- if not VERSION:
-- sys.exit(1)
--
-- HAPROXY_GIT_VERSION = get_haproxy_git_version(os.path.dirname(option.infile))
-+ HAPROXY_GIT_VERSION = get_haproxy_debian_version(os.path.dirname(option.infile))
-
- convert(option.infile, option.outfile, option.base)
-
-@@ -114,6 +109,15 @@ def get_haproxy_git_version(path):
- version = re.sub(r'-g.*', '', version)
- return version
-
-+def get_haproxy_debian_version(path):
-+ try:
-+ version = subprocess.check_output(["dpkg-parsechangelog", "-Sversion"],
-+ cwd=os.path.join(path, ".."))
-+ except subprocess.CalledProcessError:
-+ return False
-+
-+ return version.strip()
-+
- def getTitleDetails(string):
- array = string.split(".")
-
-@@ -506,7 +510,6 @@ def convert(infile, outfile, base=''):
- keywords = keywords,
- keywordsCount = keywordsCount,
- keyword_conflicts = keyword_conflicts,
-- version = VERSION,
- date = datetime.datetime.now().strftime("%Y/%m/%d"),
- )
- except TopLevelLookupException:
-@@ -524,7 +527,6 @@ def convert(infile, outfile, base=''):
- keywords = keywords,
- keywordsCount = keywordsCount,
- keyword_conflicts = keyword_conflicts,
-- version = VERSION,
- date = datetime.datetime.now().strftime("%Y/%m/%d"),
- footer = footer
- )
-diff --git a/debian/dconv/templates/template.html b/debian/dconv/templates/template.html
-index c72b3558c2dd..9aefa16dd82d 100644
---- a/debian/dconv/templates/template.html
-+++ b/debian/dconv/templates/template.html
-@@ -3,8 +3,8 @@
- <head>
- <meta charset="utf-8" />
- <title>${headers['title']} ${headers['version']} - ${headers['subtitle']}</title>
-- <link href="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.1.1/css/bootstrap.min.css" rel="stylesheet" />
-- <link href="${base}css/page.css?${version}" rel="stylesheet" />
-+ <link href="${base}css/bootstrap.min.css" rel="stylesheet" />
-+ <link href="${base}css/page.css" rel="stylesheet" />
- </head>
- <body>
- <nav class="navbar navbar-default navbar-fixed-top" role="navigation">
-@@ -15,7 +15,7 @@
- <span class="icon-bar"></span>
- <span class="icon-bar"></span>
- </button>
-- <a class="navbar-brand" href="${base}index.html">${headers['title']} <small>${headers['subtitle']}</small></a>
-+ <a class="navbar-brand" href="${base}configuration.html">${headers['title']}</a>
- </div>
- <!-- /.navbar-header -->
-
-@@ -24,31 +24,16 @@
- <ul class="nav navbar-nav">
- <li><a href="http://www.haproxy.org/">HAProxy home page</a></li>
- <li class="dropdown">
-- <a href="#" class="dropdown-toggle" data-toggle="dropdown">Versions <b class="caret"></b></a>
-+ <a href="#" class="dropdown-toggle" data-toggle="dropdown">Debian resources <b class="caret"></b></a>
- <ul class="dropdown-menu">
- ## TODO : provide a structure to dynamically generate per version links
-- <li class="dropdown-header">HAProxy 1.4</li>
-- <li><a href="${base}configuration-1.4.html">Configuration Manual <small>(stable)</small></a></li>
-- <li><a href="${base}snapshot/configuration-1.4.html">Configuration Manual <small>(snapshot)</small></a></li>
-- <li><a href="http://git.1wt.eu/git/haproxy-1.4.git/">GIT Repository</a></li>
-- <li><a href="http://www.haproxy.org/git/?p=haproxy-1.4.git">Browse repository</a></li>
-- <li><a href="http://www.haproxy.org/download/1.4/">Browse directory</a></li>
-- <li class="divider"></li>
-- <li class="dropdown-header">HAProxy 1.5</li>
-- <li><a href="${base}configuration-1.5.html">Configuration Manual <small>(stable)</small></a></li>
-- <li><a href="${base}snapshot/configuration-1.5.html">Configuration Manual <small>(snapshot)</small></a></li>
-- <li><a href="http://git.1wt.eu/git/haproxy-1.5.git/">GIT Repository</a></li>
-- <li><a href="http://www.haproxy.org/git/?p=haproxy-1.5.git">Browse repository</a></li>
-- <li><a href="http://www.haproxy.org/download/1.5/">Browse directory</a></li>
-- <li class="divider"></li>
-- <li class="dropdown-header">HAProxy 1.6</li>
-- <li><a href="${base}configuration-1.6.html">Configuration Manual <small>(stable)</small></a></li>
-- <li><a href="${base}snapshot/configuration-1.6.html">Configuration Manual <small>(snapshot)</small></a></li>
-- <li><a href="${base}intro-1.6.html">Starter Guide <small>(stable)</small></a></li>
-- <li><a href="${base}snapshot/intro-1.6.html">Starter Guide <small>(snapshot)</small></a></li>
-- <li><a href="http://git.1wt.eu/git/haproxy.git/">GIT Repository</a></li>
-- <li><a href="http://www.haproxy.org/git/?p=haproxy.git">Browse repository</a></li>
-- <li><a href="http://www.haproxy.org/download/1.6/">Browse directory</a></li>
-+ <li><a href="https://bugs.debian.org/src:haproxy">Bug Tracking System</a></li>
-+ <li><a href="https://packages.debian.org/haproxy">Package page</a></li>
-+ <li><a href="http://tracker.debian.org/pkg/haproxy">Package Tracking System</a></li>
-+ <li class="divider"></li>
-+ <li><a href="${base}intro.html">Starter Guide</a></li>
-+ <li><a href="${base}configuration.html">Configuration Manual</a></li>
-+ <li><a href="http://anonscm.debian.org/gitweb/?p=pkg-haproxy/haproxy.git">Package Git Repository</a></li>
- </ul>
- </li>
- </ul>
-@@ -72,7 +57,7 @@
- The feature is automatically disabled when the search field is focused.
- </p>
- <p class="text-right">
-- <small>Converted with <a href="https://github.com/cbonte/haproxy-dconv">haproxy-dconv</a> v<b>${version}</b> on <b>${date}</b></small>
-+ <small>Converted with <a href="https://github.com/cbonte/haproxy-dconv">haproxy-dconv</a></small>
- </p>
- </div>
- <!-- /.sidebar -->
-@@ -83,7 +68,7 @@
- <div class="text-center">
- <h1>${headers['title']}</h1>
- <h2>${headers['subtitle']}</h2>
-- <p><strong>${headers['version']}</strong></p>
-+ <p><strong>${headers['version']} (Debian)</strong></p>
- <p>
- <a href="http://www.haproxy.org/" title="HAProxy Home Page"><img src="${base}img/logo-med.png" /></a><br>
- ${headers['author']}<br>
-@@ -114,9 +99,9 @@
- </div>
- <!-- /#wrapper -->
-
-- <script src="//cdnjs.cloudflare.com/ajax/libs/jquery/1.11.0/jquery.min.js"></script>
-- <script src="//cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.1.1/js/bootstrap.min.js"></script>
-- <script src="//cdnjs.cloudflare.com/ajax/libs/typeahead.js/0.11.1/typeahead.bundle.min.js"></script>
-+ <script src="${base}js/jquery.min.js"></script>
-+ <script src="${base}js/bootstrap.min.js"></script>
-+ <script src="${base}js/typeahead.bundle.js"></script>
- <script>
- /* Keyword search */
- var searchFocus = false
+++ /dev/null
-Subject: Add documentation field to the systemd unit
-Author: Apollon Oikonomopoulos <apoikos@gmail.com>
-
-Forwarded: no
-Last-Update: 2014-01-03
---- a/contrib/systemd/haproxy.service.in
-+++ b/contrib/systemd/haproxy.service.in
-@@ -1,5 +1,7 @@
- [Unit]
- Description=HAProxy Load Balancer
-+Documentation=man:haproxy(1)
-+Documentation=file:/usr/share/doc/haproxy/configuration.txt.gz
- After=network.target syslog.service
- Wants=syslog.service
-
+++ /dev/null
-Author: Apollon Oikonomopoulos
-Description: Check the configuration before reloading HAProxy
- While HAProxy will survive a reload with an invalid configuration, explicitly
- checking the config file for validity will make "systemctl reload" return an
- error and let the user know something went wrong.
-
-Forwarded: no
-Last-Update: 2014-04-27
-Index: haproxy/contrib/systemd/haproxy.service.in
-===================================================================
---- haproxy.orig/contrib/systemd/haproxy.service.in
-+++ haproxy/contrib/systemd/haproxy.service.in
-@@ -8,6 +8,7 @@ Wants=syslog.service
- [Service]
- ExecStartPre=@SBINDIR@/haproxy -f /etc/haproxy/haproxy.cfg -c -q
- ExecStart=@SBINDIR@/haproxy-systemd-wrapper -f /etc/haproxy/haproxy.cfg -p /run/haproxy.pid
-+ExecReload=@SBINDIR@/haproxy -c -f /etc/haproxy/haproxy.cfg
- ExecReload=/bin/kill -USR2 $MAINPID
- KillMode=mixed
- Restart=always
+++ /dev/null
-Subject: start after the syslog service using systemd
-Author: Apollon Oikonomopoulos <apoikos@gmail.com>
-
-Forwarded: no
-Last-Update: 2013-10-15
-Index: haproxy/contrib/systemd/haproxy.service.in
-===================================================================
---- haproxy.orig/contrib/systemd/haproxy.service.in
-+++ haproxy/contrib/systemd/haproxy.service.in
-@@ -1,6 +1,7 @@
- [Unit]
- Description=HAProxy Load Balancer
--After=network.target
-+After=network.target syslog.service
-+Wants=syslog.service
-
- [Service]
- ExecStartPre=@SBINDIR@/haproxy -f /etc/haproxy/haproxy.cfg -c -q
+++ /dev/null
-Author: Apollon Oikonomopoulos <apoikos@debian.org>
-Description: Use the variables from /etc/default/haproxy
- This will allow seamless upgrades from the sysvinit system while respecting
- any changes the users may have made. It will also make local configuration
- easier than overriding the systemd unit file.
-
-Last-Update: 2014-06-20
-Forwarded: not-needed
-Index: haproxy/contrib/systemd/haproxy.service.in
-===================================================================
---- haproxy.orig/contrib/systemd/haproxy.service.in
-+++ haproxy/contrib/systemd/haproxy.service.in
-@@ -6,9 +6,11 @@ After=network.target syslog.service
- Wants=syslog.service
-
- [Service]
--ExecStartPre=@SBINDIR@/haproxy -f /etc/haproxy/haproxy.cfg -c -q
--ExecStart=@SBINDIR@/haproxy-systemd-wrapper -f /etc/haproxy/haproxy.cfg -p /run/haproxy.pid
--ExecReload=@SBINDIR@/haproxy -c -f /etc/haproxy/haproxy.cfg
-+Environment=CONFIG=/etc/haproxy/haproxy.cfg
-+EnvironmentFile=-/etc/default/haproxy
-+ExecStartPre=@SBINDIR@/haproxy -f ${CONFIG} -c -q
-+ExecStart=@SBINDIR@/haproxy-systemd-wrapper -f ${CONFIG} -p /run/haproxy.pid $EXTRAOPTS
-+ExecReload=@SBINDIR@/haproxy -c -f ${CONFIG}
- ExecReload=/bin/kill -USR2 $MAINPID
- KillMode=mixed
- Restart=always
+++ /dev/null
-0002-Use-dpkg-buildflags-to-build-halog.patch
-haproxy.service-start-after-syslog.patch
-haproxy.service-add-documentation.patch
-haproxy.service-check-config-before-reload.patch
-haproxy.service-use-environment-variables.patch
-MIRA0001-Adding-include-configuration-statement-to-haproxy.patch
+++ /dev/null
-# Create an additional socket in haproxy's chroot in order to allow logging via
-# /dev/log to chroot'ed HAProxy processes
-$AddUnixListenSocket /var/lib/haproxy/dev/log
-
-# Send HAProxy messages to a dedicated logfile
-if $programname startswith 'haproxy' then /var/log/haproxy.log
-&~
+++ /dev/null
-#!/usr/bin/make -f
-
-export DEB_LDFLAGS_MAINT_APPEND = -Wl,--as-needed
-
-MAKEARGS=DESTDIR=debian/haproxy \
- PREFIX=/usr \
- IGNOREGIT=true \
- MANDIR=/usr/share/man \
- DOCDIR=/usr/share/doc/haproxy \
- USE_PCRE=1 PCREDIR= \
- USE_OPENSSL=1 \
- USE_ZLIB=1 \
- USE_LUA=1 \
- LUA_INC=/usr/include/lua5.3
-
-OS_TYPE = $(shell dpkg-architecture -qDEB_HOST_ARCH_OS)
-
-ifeq ($(OS_TYPE),linux)
- MAKEARGS+= TARGET=linux2628
-else ifeq ($(OS_TYPE),kfreebsd)
- MAKEARGS+= TARGET=freebsd
-else
- MAKEARGS+= TARGET=generic
-endif
-
-ifneq ($(filter amd64 i386, $(shell dpkg-architecture -qDEB_HOST_ARCH_CPU)),)
- MAKEARGS+= USE_REGPARM=1
-else ifeq ($(shell dpkg-architecture -qDEB_HOST_ARCH_CPU),amd64)
- MAKEARGS+= USE_REGPARM=1
-endif
-
-MAKEARGS += CFLAGS="$(shell dpkg-buildflags --get CFLAGS) $(shell dpkg-buildflags --get CPPFLAGS)"
-MAKEARGS += LDFLAGS="$(shell dpkg-buildflags --get LDFLAGS)"
-
-%:
- dh $@ --with systemd,sphinxdoc
-
-override_dh_auto_configure:
-
-override_dh_auto_build-arch:
- make $(MAKEARGS)
- make -C contrib/systemd $(MAKEARGS)
- dh_auto_build -Dcontrib/halog
- $(MAKE) -C doc/lua-api man
-
-override_dh_auto_build-indep:
- # Build the HTML documentation, after patching dconv
- patch -p1 < $(CURDIR)/debian/patches/debianize-dconv.patch
- python -B $(CURDIR)/debian/dconv/haproxy-dconv.py \
- -i $(CURDIR)/doc/configuration.txt \
- -o $(CURDIR)/doc/configuration.html
- python -B $(CURDIR)/debian/dconv/haproxy-dconv.py \
- -i $(CURDIR)/doc/intro.txt \
- -o $(CURDIR)/doc/intro.html
- patch -p1 -R < $(CURDIR)/debian/patches/debianize-dconv.patch
- $(MAKE) -C doc/lua-api html
-
-override_dh_auto_clean:
- make -C contrib/systemd clean
- $(MAKE) -C doc/lua-api clean
- dh_auto_clean
- dh_auto_clean -Dcontrib/halog
-
-override_dh_auto_install-arch:
- make $(MAKEARGS) install
- install -m 0644 -D debian/rsyslog.conf debian/haproxy/etc/rsyslog.d/49-haproxy.conf
- install -m 0644 -D debian/logrotate.conf debian/haproxy/etc/logrotate.d/haproxy
-
-override_dh_auto_install-indep:
-
-override_dh_installdocs:
- dh_installdocs -Xsystemd/ -Xhalog/
-
-override_dh_installexamples:
- dh_installexamples -X build.cfg
-
-override_dh_installinit:
- dh_installinit --no-restart-on-upgrade
-
-override_dh_strip:
- dh_strip --dbg-package=haproxy-dbg
+++ /dev/null
-3.0 (quilt)
+++ /dev/null
-debian/dconv/css/check.png
-debian/dconv/css/cross.png
-debian/dconv/img/logo-med.png
+++ /dev/null
-debian/vim-haproxy.yaml /usr/share/vim/registry
-debian/haproxy.vim /usr/share/vim/addons/ftdetect
-examples/haproxy.vim /usr/share/vim/addons/syntax
+++ /dev/null
-addon: haproxy
-description: "Syntax highlighting for HAProxy"
-files:
- - syntax/haproxy.vim
- - ftdetect/haproxy.vim
+++ /dev/null
-version=3
-opts="uversionmangle=s/-(dev\d+)/~$1/" http://haproxy.1wt.eu/download/1.6/src/ haproxy-(1\.6\.\d+)\.(?:tgz|tbz2|tar\.(?:gz|bz2|xz))