Peteris Krumins added CURLOPT_COOKIELIST and CURLINFO_COOKIELIST, which
provide a simple interface for extracting and setting cookies in libcurl's
internal "cookie jar". See the new cookie_interface.c example code.
diff --git a/lib/cookie.c b/lib/cookie.c
index 019c00b..00ea0d6 100644
--- a/lib/cookie.c
+++ b/lib/cookie.c
@@ -85,6 +85,9 @@
 #include <stdlib.h>
 #include <string.h>
 
+#define _MPRINTF_REPLACE /* without this, on Windows we get an undefined reference to snprintf */
+#include <curl/mprintf.h>
+
 #include "urldata.h"
 #include "cookie.h"
 #include "strequal.h"
@@ -816,6 +819,34 @@
    }
 }
 
+/* get_netscape_format()
+ *
+ * Formats a string for the Netscape output file, without a newline at the end.
+ *
+ * Returns a char * to a formatted line that must be free()d by the caller.
+ */
+static char *get_netscape_format(const struct Cookie *co)
+{
+   return aprintf(
+     "%s%s\t" /* domain */
+     "%s\t"   /* tailmatch */
+     "%s\t"   /* path */
+     "%s\t"   /* secure */
+     "%u\t"   /* expires */
+     "%s\t"   /* name */
+     "%s",    /* value */
+     /* Make sure all domains are prefixed with a dot if they allow
+        tailmatching. This is Mozilla-style. */
+     (co->tailmatch && co->domain && co->domain[0] != '.')? ".":"",
+     co->domain?co->domain:"unknown",
+     co->tailmatch?"TRUE":"FALSE",
+     co->path?co->path:"/",
+     co->secure?"TRUE":"FALSE",
+     (unsigned int)co->expires,
+     co->name,
+     co->value?co->value:"");
+}
+
 /*
  * Curl_cookie_output()
  *
@@ -847,6 +878,8 @@
   }
 
   if(c) {
+    char *format_ptr;
+
     fputs("# Netscape HTTP Cookie File\n"
           "# http://www.netscape.com/newsref/std/cookie_spec.html\n"
           "# This file was generated by libcurl! Edit at your own risk.\n\n",
@@ -854,26 +887,13 @@
     co = c->cookies;
 
     while(co) {
-      fprintf(out,
-              "%s%s\t" /* domain */
-              "%s\t" /* tailmatch */
-              "%s\t" /* path */
-              "%s\t" /* secure */
-              "%u\t" /* expires */
-              "%s\t" /* name */
-              "%s\n", /* value */
-
-              /* Make sure all domains are prefixed with a dot if they allow
-                 tailmatching. This is Mozilla-style. */
-              (co->tailmatch && co->domain && co->domain[0] != '.')? ".":"",
-              co->domain?co->domain:"unknown",
-              co->tailmatch?"TRUE":"FALSE",
-              co->path?co->path:"/",
-              co->secure?"TRUE":"FALSE",
-              (unsigned int)co->expires,
-              co->name,
-              co->value?co->value:"");
-
+      format_ptr = get_netscape_format(co);
+      if (format_ptr == NULL) {
+        fprintf(out, "#\n# Fatal libcurl error\n");
+        return 1;
+      }
+      fprintf(out, "%s\n", format_ptr);
+      free(format_ptr);
       co=co->next;
     }
   }
@@ -884,4 +904,34 @@
   return 0;
 }
 
+struct curl_slist *Curl_cookie_list(struct SessionHandle *data)
+{
+   struct curl_slist *list = NULL;
+   struct curl_slist *beg;
+   struct Cookie *c;
+   char *line;
+
+   if (data->cookies == NULL) return NULL;
+   if (data->cookies->numcookies == 0) return NULL;
+
+   c = data->cookies->cookies;
+
+   beg = list;
+   while (c) {
+     /* fill the list with _all_ the cookies we know */
+     line = get_netscape_format(c);
+     if (line == NULL) {
+       /* get_netscape_format returns null only if we run out of memory */
+
+       curl_slist_free_all(beg); /* free some memory */
+       return NULL;
+     }
+     list = curl_slist_append(list, line);
+     free(line);
+     c = c->next;
+   }
+
+   return list;
+}
+
 #endif /* CURL_DISABLE_HTTP || CURL_DISABLE_COOKIES */
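
For reference, a line produced by get_netscape_format() (and returned one per
node by Curl_cookie_list(), or written with a trailing newline by
Curl_cookie_output()) consists of seven tab-separated fields: domain,
tailmatch, path, secure, expires as a unix timestamp, name and value, with a
leading dot on the domain when tailmatching is enabled. A hypothetical cookie
would be rendered like this:

    .example.com  TRUE  /  FALSE  1138011776  name  value
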