Diffstat (limited to 'backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch')
-rw-r--r--  backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch  112
1 file changed, 112 insertions, 0 deletions
diff --git a/backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch b/backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch
new file mode 100644
index 0000000..129e9ce
--- /dev/null
+++ b/backport-urlapi-make-sure-zoneid-is-also-duplicated-in-curl_u.patch
@@ -0,0 +1,112 @@
+From 49e244318672c688097c1bf601a110005cd9a6a8 Mon Sep 17 00:00:00 2001
+From: Daniel Stenberg <daniel@haxx.se>
+Date: Mon, 31 Jul 2023 10:07:35 +0200
+Subject: [PATCH] urlapi: make sure zoneid is also duplicated in curl_url_dup
+
+Add several curl_url_dup() tests to the general lib1560 test.
+
+Reported-by: Rutger Broekhoff
+Bug: https://curl.se/mail/lib-2023-07/0047.html
+Closes #11549
+
+Conflict: tests/libtest/lib1560.c (context adapted for the backport)
+Reference: https://github.com/curl/curl/commit/49e244318672c688097c1bf601a110005cd9a6a8
+---
+ lib/urlapi.c | 1 +
+ tests/libtest/lib1560.c | 67 +++++++++++++++++++++++++++++++++++++++++
+ 2 files changed, 68 insertions(+)
+
+diff --git a/lib/urlapi.c b/lib/urlapi.c
+index cd423c335d88f..b1a126d548213 100644
+--- a/lib/urlapi.c
++++ b/lib/urlapi.c
+@@ -1385,6 +1385,7 @@ CURLU *curl_url_dup(const CURLU *in)
+ DUP(u, in, path);
+ DUP(u, in, query);
+ DUP(u, in, fragment);
++ DUP(u, in, zoneid);
+ u->portnum = in->portnum;
+ }
+ return u;
+diff --git a/tests/libtest/lib1560.c b/tests/libtest/lib1560.c
+index 0eca0fda72d0b..ff03bec9391a4 100644
+--- a/tests/libtest/lib1560.c
++++ b/tests/libtest/lib1560.c
+@@ -1672,10 +1672,77 @@ static int huge(void)
+ return error;
+ }
+
++static int urldup(void)
++{
++ const char *url[] = {
++ "http://"
++ "user:pwd@"
++ "[2a04:4e42:e00::347%25eth0]"
++ ":80"
++ "/path"
++ "?query"
++ "#fraggie",
++ "https://example.com",
++ "https://user@example.com",
++ "https://user.pwd@example.com",
++ "https://user.pwd@example.com:1234",
++ "https://example.com:1234",
++ "example.com:1234",
++ "https://user.pwd@example.com:1234/path?query#frag",
++ NULL
++ };
++ CURLU *copy = NULL;
++ char *h_str = NULL, *copy_str = NULL;
++ CURLU *h = curl_url();
++ int i;
++
++ if(!h)
++ goto err;
++
++ for(i = 0; url[i]; i++) {
++ CURLUcode rc = curl_url_set(h, CURLUPART_URL, url[i],
++ CURLU_GUESS_SCHEME);
++ if(rc)
++ goto err;
++ copy = curl_url_dup(h);
++
++ rc = curl_url_get(h, CURLUPART_URL, &h_str, 0);
++ if(rc)
++ goto err;
++
++ rc = curl_url_get(copy, CURLUPART_URL, &copy_str, 0);
++ if(rc)
++ goto err;
++
++ if(strcmp(h_str, copy_str)) {
++ printf("Original: %s\nParsed: %s\nCopy: %s\n",
++ url[i], h_str, copy_str);
++ goto err;
++ }
++ curl_free(copy_str);
++ curl_free(h_str);
++ curl_url_cleanup(copy);
++ copy_str = NULL;
++ h_str = NULL;
++ copy = NULL;
++ }
++ curl_url_cleanup(h);
++ return 0;
++err:
++ curl_free(copy_str);
++ curl_free(h_str);
++ curl_url_cleanup(copy);
++ curl_url_cleanup(h);
++ return 1;
++}
++
+ int test(char *URL)
+ {
+ (void)URL; /* not used */
+
++ if(urldup())
++ return 11;
++
+ if(get_url())
+ return 3;
+
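
For context, beyond the patch itself: below is a minimal standalone sketch of the behaviour this backport fixes. It parses a URL carrying an IPv6 zone id, duplicates the handle with curl_url_dup(), and reads CURLUPART_ZONEID back from the copy; without the DUP(u, in, zoneid) line added above, the duplicate loses the zone id. The example URL and file layout are illustrative assumptions, not part of the patch, and it assumes a libcurl with URL API zone-id support (7.65.0 or later), linked with -lcurl.

/* zoneid_dup_demo.c (hypothetical name): check that curl_url_dup()
 * preserves the IPv6 zone id. With this patch applied, the copy keeps
 * "eth0"; without it, CURLUPART_ZONEID on the copy returns nothing. */
#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURLU *orig = curl_url();
  CURLU *copy = NULL;
  char *zone = NULL;

  if(!orig)
    return 1;

  /* link-local IPv6 address with a zone id ("eth0", URL-encoded as %25eth0) */
  if(curl_url_set(orig, CURLUPART_URL,
                  "http://[fe80::20c:29ff:fe9c:409b%25eth0]/", 0))
    goto done;

  copy = curl_url_dup(orig);
  if(!copy)
    goto done;

  /* read the zone id back from the duplicate, not the original */
  if(!curl_url_get(copy, CURLUPART_ZONEID, &zone, 0))
    printf("zoneid in copy: %s\n", zone);
  else
    printf("zoneid missing in copy\n");

done:
  curl_free(zone);
  curl_url_cleanup(copy);
  curl_url_cleanup(orig);
  return 0;
}

Built with a patched libcurl this prints "zoneid in copy: eth0"; against an unpatched library it reports the zone id missing, which is the regression the lib1560 urldup() test above now covers.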