Diffstat (limited to 'pkgs/tools/networking/curl')
-rw-r--r--  pkgs/tools/networking/curl/8.7.1-compression-fix.patch | 168
-rw-r--r--  pkgs/tools/networking/curl/default.nix                  |  15
2 files changed, 180 insertions, 3 deletions
diff --git a/pkgs/tools/networking/curl/8.7.1-compression-fix.patch b/pkgs/tools/networking/curl/8.7.1-compression-fix.patch
new file mode 100644
index 0000000000000..53747b54c4610
--- /dev/null
+++ b/pkgs/tools/networking/curl/8.7.1-compression-fix.patch
@@ -0,0 +1,168 @@
+From b30d694a027eb771c02a3db0dee0ca03ccab7377 Mon Sep 17 00:00:00 2001
+From: Stefan Eissing <stefan@eissing.org>
+Date: Thu, 28 Mar 2024 11:08:15 +0100
+Subject: [PATCH] content_encoding: brotli and others, pass through 0-length
+ writes
+
+- curl's transfer handling may write 0-length chunks at the end of the
+  download with an EOS flag. (HTTP/2 does this commonly)
+
+- content encoders need to pass-through such a write and not count this
+  as error in case they are finished decoding
+
+Fixes #13209
+Fixes #13212
+Closes #13219
+---
+ lib/content_encoding.c         | 10 +++++-----
+ tests/http/test_02_download.py | 13 +++++++++++++
+ tests/http/testenv/env.py      |  7 ++++++-
+ tests/http/testenv/httpd.py    | 20 ++++++++++++++++++++
+ 4 files changed, 44 insertions(+), 6 deletions(-)
+
+diff --git a/lib/content_encoding.c b/lib/content_encoding.c
+index c1abf24e8c027c..8e926dd2ecd5ad 100644
+--- a/lib/content_encoding.c
++++ b/lib/content_encoding.c
+@@ -300,7 +300,7 @@ static CURLcode deflate_do_write(struct Curl_easy *data,
+   struct zlib_writer *zp = (struct zlib_writer *) writer;
+   z_stream *z = &zp->z;     /* zlib state structure */
+ 
+-  if(!(type & CLIENTWRITE_BODY))
++  if(!(type & CLIENTWRITE_BODY) || !nbytes)
+     return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
+ 
+   /* Set the compressed input when this function is called */
+@@ -457,7 +457,7 @@ static CURLcode gzip_do_write(struct Curl_easy *data,
+   struct zlib_writer *zp = (struct zlib_writer *) writer;
+   z_stream *z = &zp->z;     /* zlib state structure */
+ 
+-  if(!(type & CLIENTWRITE_BODY))
++  if(!(type & CLIENTWRITE_BODY) || !nbytes)
+     return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
+ 
+   if(zp->zlib_init == ZLIB_INIT_GZIP) {
+@@ -669,7 +669,7 @@ static CURLcode brotli_do_write(struct Curl_easy *data,
+   CURLcode result = CURLE_OK;
+   BrotliDecoderResult r = BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT;
+ 
+-  if(!(type & CLIENTWRITE_BODY))
++  if(!(type & CLIENTWRITE_BODY) || !nbytes)
+     return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
+ 
+   if(!bp->br)
+@@ -762,7 +762,7 @@ static CURLcode zstd_do_write(struct Curl_easy *data,
+   ZSTD_outBuffer out;
+   size_t errorCode;
+ 
+-  if(!(type & CLIENTWRITE_BODY))
++  if(!(type & CLIENTWRITE_BODY) || !nbytes)
+     return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
+ 
+   if(!zp->decomp) {
+@@ -916,7 +916,7 @@ static CURLcode error_do_write(struct Curl_easy *data,
+   (void) buf;
+   (void) nbytes;
+ 
+-  if(!(type & CLIENTWRITE_BODY))
++  if(!(type & CLIENTWRITE_BODY) || !nbytes)
+     return Curl_cwriter_write(data, writer->next, type, buf, nbytes);
+ 
+   failf(data, "Unrecognized content encoding type. "
+diff --git a/tests/http/test_02_download.py b/tests/http/test_02_download.py
+index 4db9c9d36e9ed5..395fc862f2f839 100644
+--- a/tests/http/test_02_download.py
++++ b/tests/http/test_02_download.py
+@@ -394,6 +394,19 @@ def test_02_27_paused_no_cl(self, env: Env, httpd, nghttpx, repeat):
+         r = client.run(args=[url])
+         r.check_exit_code(0)
+ 
++    @pytest.mark.parametrize("proto", ['http/1.1', 'h2', 'h3'])
++    def test_02_28_get_compressed(self, env: Env, httpd, nghttpx, repeat, proto):
++        if proto == 'h3' and not env.have_h3():
++            pytest.skip("h3 not supported")
++        count = 1
++        urln = f'https://{env.authority_for(env.domain1brotli, proto)}/data-100k?[0-{count-1}]'
++        curl = CurlClient(env=env)
++        r = curl.http_download(urls=[urln], alpn_proto=proto, extra_args=[
++            '--compressed'
++        ])
++        r.check_exit_code(code=0)
++        r.check_response(count=count, http_status=200)
++
+     def check_downloads(self, client, srcfile: str, count: int,
+                         complete: bool = True):
+         for i in range(count):
+diff --git a/tests/http/testenv/env.py b/tests/http/testenv/env.py
+index a207059dcd57c5..13c5d6bd46ee57 100644
+--- a/tests/http/testenv/env.py
++++ b/tests/http/testenv/env.py
+@@ -129,10 +129,11 @@ def __init__(self):
+         self.htdocs_dir = os.path.join(self.gen_dir, 'htdocs')
+         self.tld = 'http.curl.se'
+         self.domain1 = f"one.{self.tld}"
++        self.domain1brotli = f"brotli.one.{self.tld}"
+         self.domain2 = f"two.{self.tld}"
+         self.proxy_domain = f"proxy.{self.tld}"
+         self.cert_specs = [
+-            CertificateSpec(domains=[self.domain1, 'localhost'], key_type='rsa2048'),
++            CertificateSpec(domains=[self.domain1, self.domain1brotli, 'localhost'], key_type='rsa2048'),
+             CertificateSpec(domains=[self.domain2], key_type='rsa2048'),
+             CertificateSpec(domains=[self.proxy_domain, '127.0.0.1'], key_type='rsa2048'),
+             CertificateSpec(name="clientsX", sub_specs=[
+@@ -376,6 +377,10 @@ def htdocs_dir(self) -> str:
+     def domain1(self) -> str:
+         return self.CONFIG.domain1
+ 
++    @property
++    def domain1brotli(self) -> str:
++        return self.CONFIG.domain1brotli
++
+     @property
+     def domain2(self) -> str:
+         return self.CONFIG.domain2
+diff --git a/tests/http/testenv/httpd.py b/tests/http/testenv/httpd.py
+index c04c22699a62c4..b8615875a9a558 100644
+--- a/tests/http/testenv/httpd.py
++++ b/tests/http/testenv/httpd.py
+@@ -50,6 +50,7 @@ class Httpd:
+         'alias', 'env', 'filter', 'headers', 'mime', 'setenvif',
+         'socache_shmcb',
+         'rewrite', 'http2', 'ssl', 'proxy', 'proxy_http', 'proxy_connect',
++        'brotli',
+         'mpm_event',
+     ]
+     COMMON_MODULES_DIRS = [
+@@ -203,6 +204,7 @@ def _mkpath(self, path):
+ 
+     def _write_config(self):
+         domain1 = self.env.domain1
++        domain1brotli = self.env.domain1brotli
+         creds1 = self.env.get_credentials(domain1)
+         domain2 = self.env.domain2
+         creds2 = self.env.get_credentials(domain2)
+@@ -285,6 +287,24 @@ def _write_config(self):
+                 f'</VirtualHost>',
+                 f'',
+             ])
++            # Alternate to domain1 with BROTLI compression
++            conf.extend([  # https host for domain1, h1 + h2
++                f'<VirtualHost *:{self.env.https_port}>',
++                f'    ServerName {domain1brotli}',
++                f'    Protocols h2 http/1.1',
++                f'    SSLEngine on',
++                f'    SSLCertificateFile {creds1.cert_file}',
++                f'    SSLCertificateKeyFile {creds1.pkey_file}',
++                f'    DocumentRoot "{self._docs_dir}"',
++                f'    SetOutputFilter BROTLI_COMPRESS',
++            ])
++            conf.extend(self._curltest_conf(domain1))
++            if domain1 in self._extra_configs:
++                conf.extend(self._extra_configs[domain1])
++            conf.extend([
++                f'</VirtualHost>',
++                f'',
++            ])
+             conf.extend([  # https host for domain2, no h2
+                 f'<VirtualHost *:{self.env.https_port}>',
+                 f'    ServerName {domain2}',
diff --git a/pkgs/tools/networking/curl/default.nix b/pkgs/tools/networking/curl/default.nix
index 6559a5cf4ecb0..154156e89463f 100644
--- a/pkgs/tools/networking/curl/default.nix
+++ b/pkgs/tools/networking/curl/default.nix
@@ -49,17 +49,26 @@ assert !((lib.count (x: x) [ gnutlsSupport opensslSupport wolfsslSupport rustlsS
 
 stdenv.mkDerivation (finalAttrs: {
   pname = "curl";
-  version = "8.7.1";
+  version = "8.8.0";
 
   src = fetchurl {
     urls = [
       "https://curl.haxx.se/download/curl-${finalAttrs.version}.tar.xz"
       "https://github.com/curl/curl/releases/download/curl-${builtins.replaceStrings [ "." ] [ "_" ] finalAttrs.version}/curl-${finalAttrs.version}.tar.xz"
     ];
-    hash = "sha256-b+oqrGpGEPvQQAr7C83b5yWKZMY/H2jlhV68DGWXEM0=";
+    hash = "sha256-D1i7lfwzDIpG7rPfVwGw2Qydm/zEK9HNCHkdElUdRAA=";
   };
 
+  patches = lib.optionals (lib.versionOlder finalAttrs.version "8.7.2") [
+    # https://github.com/curl/curl/pull/13219
+    # https://github.com/newsboat/newsboat/issues/2728
+    ./8.7.1-compression-fix.patch
+  ];
+
+  # this could be accomplished by updateAutotoolsGnuConfigScriptsHook, but that causes infinite recursion
+  # necessary for FreeBSD code path in configure
   postPatch = ''
+    substituteInPlace ./config.guess --replace-fail /usr/bin/uname uname
     patchShebangs scripts
   '';
 
@@ -205,7 +214,7 @@ stdenv.mkDerivation (finalAttrs: {
 
   meta = with lib; {
     changelog = "https://curl.se/changes.html#${lib.replaceStrings [ "." ] [ "_" ] finalAttrs.version}";
-    description = "A command line tool for transferring files with URL syntax";
+    description = "Command line tool for transferring files with URL syntax";
     homepage    = "https://curl.se/";
     license = licenses.curl;
     maintainers = with maintainers; [ lovek323 ];
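
The `lib.optionals (lib.versionOlder finalAttrs.version "8.7.2")` guard above keeps the backported compression fix applied only while the packaged curl predates the upstream release containing it, so the patch list collapses to empty once the 8.8.0 bump takes effect. For readers who want the same conditional-patch pattern outside nixpkgs, a minimal overlay sketch is shown below; the overlay shape and the local patch path are assumptions for illustration, not part of this commit:

  # Overlay sketch (assumption: the patch file has been copied locally
  # as ./8.7.1-compression-fix.patch next to this overlay).
  # lib.optionals returns [] when the condition is false, so curl
  # versions at or above 8.7.2 receive no extra patch.
  final: prev: {
    curl = prev.curl.overrideAttrs (old: {
      patches = (old.patches or [ ])
        ++ prev.lib.optionals (prev.lib.versionOlder old.version "8.7.2") [
          ./8.7.1-compression-fix.patch
        ];
    });
  }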