unexpected output in error buffer & correct gdiff

From: Lucas Adamski <wakked1_at_yahoo.com>
Date: Sun, 7 Oct 2001 10:09:47 -0700 (PDT)

Occasionally (with no discernible pattern) I get the HTTP request in the
error buffer field (set by CURLOPT_ERRORBUFFER), like so:

GET / HTTP/1.1^M
User-Agent: netscape^M
Host: 192.168.0.1:8180^M
Pragma: no-cache^M
Accept: image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, */*^M

This happens sporadically, about once every 100 attempts or so. These are
separate processes querying only 1-3 URLs right now, so I don't think it's a
memory corruption issue. The transfer returns a 200 OK, so the error buffer
should be empty. In all other requests it behaves normally (i.e., blank if
there is no error, or containing the appropriate error message). Any ideas?
Thanks,
  Lucas.
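
PS. In case it helps to reproduce: the error buffer is hooked up in the
usual way. Here's a minimal sketch of that pattern, not my actual code (the
URL is just made up to match the Host: header above):

#include <stdio.h>
#include <curl/curl.h>

int main(void)
{
  CURL *curl = curl_easy_init();
  char errbuf[CURL_ERROR_SIZE];
  CURLcode res;

  if(!curl)
    return 1;

  curl_easy_setopt(curl, CURLOPT_URL, "http://192.168.0.1:8180/");
  curl_easy_setopt(curl, CURLOPT_ERRORBUFFER, errbuf);

  /* clear the buffer before each transfer, since libcurl is only
     supposed to write into it when an error actually occurs */
  errbuf[0] = '\0';

  res = curl_easy_perform(curl);
  if(res != CURLE_OK)
    fprintf(stderr, "transfer failed: %s\n", errbuf);
  else if(errbuf[0])
    fprintf(stderr, "200 OK, but error buffer contains: %s\n", errbuf);

  curl_easy_cleanup(curl);
  return 0;
}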

Oh, here's the proper gdiff for the CURLOPT_MAXDOWNLOAD hack... sorry, I did
it backwards last time. :)

diff -u ../curl-7.9-pre3/lib/transfer.c lib/transfer.c
--- ../curl-7.9-pre3/lib/transfer.c Thu Aug 30 22:54:04 2001
+++ lib/transfer.c Sun Oct 7 04:54:24 2001
@@ -213,7 +213,8 @@
   struct connectdata *conn = (struct connectdata *)c_conn;
   char *buf;
   int maxfd;
-
+  char maxrange[40]="";
+
   data = conn->data; /* there's the root struct */
   buf = data->state.buffer;
   maxfd = (conn->sockfd>conn->writesockfd?conn->sockfd:conn->writesockfd)+1;
@@ -468,14 +469,17 @@
                    */
                   if(data->set.no_body)
                     return CURLE_OK;
-
                   if(!conn->bits.close) {
                     /* If this is not the last request before a close, we must
                        set the maximum download size to the size of the
                        expected document or else, we won't know when to stop
                        reading! */
                     if(-1 != conn->size)
-                      conn->maxdownload = conn->size;
+                      if(conn->maxdownload == -1 || conn->maxdownload > conn->size) {
+                        infof(data, "Resetting max download from %ld to %ld\n",
+                              conn->maxdownload, conn->size);
+                        conn->maxdownload = conn->size;
+                      } else if(conn->maxdownload != -1 && conn->maxdownload < conn->size)
+                        conn->size = conn->maxdownload;
 
                     /* If max download size is *zero* (nothing) we already
                        have nothing and can safely return ok now! */
@@ -557,8 +561,19 @@
               /* check for Content-Length: header lines to get size */
               if (strnequal("Content-Length:", p, 15) &&
                   sscanf (p+15, " %ld", &contentlength)) {
-                conn->size = contentlength;
-                Curl_pgrsSetDownloadSize(data, contentlength);
+                if((conn->maxdownload != -1) && (contentlength > conn->maxdownload)) {
+                  if (conn->bits.rangestringalloc == TRUE)
+                    free(conn->range);
+                  snprintf(maxrange, sizeof(maxrange), "0-%ld", conn->maxdownload);
+
+                  /* tell ourselves to fetch this range */
+                  conn->range = strdup(maxrange);
+                  conn->bits.use_range = TRUE;        /* enable range download */
+                  conn->bits.rangestringalloc = TRUE; /* mark range string allocated */
+                  contentlength = conn->maxdownload;
+                }
+                conn->size = contentlength;
+                Curl_pgrsSetDownloadSize(data, conn->size);
               }
               else if((httpversion == 10) &&
                       conn->bits.httpproxy &&
@@ -755,7 +770,7 @@
                 Curl_httpchunk_read(conn, str, nread, &nread);
 
               if(CHUNKE_OK < res) {
-                failf(data, "Receeived problem in the chunky parser");
+                failf(data, "Receive problem in the chunk parser");
                 return CURLE_READ_ERROR;
               }
               else if(CHUNKE_STOP == res) {
diff -u ../curl-7.9-pre3/lib/url.c lib/url.c
--- ../curl-7.9-pre3/lib/url.c Mon Sep 3 07:01:49 2001
+++ lib/url.c Sun Oct 7 16:58:28 2001
@@ -239,6 +239,8 @@
 
     /* Set the default size of the SSL session ID cache */
     data->set.ssl.numsessions = 5;
+
+    data->set.maxdownload = -1;
 
     /* create an array with connection data struct pointers */
     data->state.numconnects = 5; /* hard-coded right now */
@@ -338,6 +340,12 @@
       }
     }
     break;
+  case CURLOPT_MAXDOWNLOAD:
+    /*
+     * Hard limit on download size, not pretty, use with care
+     */
+    data->set.maxdownload = va_arg(param, long);
+    break;
   case CURLOPT_FORBID_REUSE:
     /*
      * When this transfer is done, it must not be left to be reused by a
@@ -1217,7 +1225,7 @@
             failf(data, "Insufficient kernel memory was available: %d",
errno);
             break;
           default:
-            failf(data,"errno %d\n");
+            failf(data,"errno %d\n", errno);
           } /* end of switch */
         
           return CURLE_HTTP_PORT_FAILED;
@@ -1404,7 +1412,8 @@
   conn->bits.proxy_user_passwd = data->set.proxyuserpwd?1:0;
 
   /* maxdownload must be -1 on init, as 0 is a valid value! */
-  conn->maxdownload = -1; /* might have been used previously! */
+  conn->maxdownload = data->set.maxdownload; /* might have been used previously! */
+  infof(data, "Setting download limit to %ld\n", conn->maxdownload);
 
   /* Store creation time to help future close decision making */
   conn->created = Curl_tvnow();
@@ -2106,7 +2115,8 @@
     conn->bits.reuse = TRUE; /* yes, we're re-using here */
     conn->bits.chunk = FALSE; /* always assume not chunked unless told
                                  otherwise */
-    conn->maxdownload = -1; /* might have been used previously! */
+    conn->maxdownload = data->set.maxdownload;
+    infof(data, "Resetting download limit to %ld\n", conn->maxdownload);
 
     free(old_conn); /* we don't need this anymore */
 
diff -u ../curl-7.9-pre3/lib/urldata.h lib/urldata.h
--- ../curl-7.9-pre3/lib/urldata.h Sun Oct 7 02:04:11 2001
+++ lib/urldata.h Sun Oct 7 02:56:54 2001
@@ -504,6 +504,7 @@
   long infilesize; /* size of file to upload, -1 means unknown */
   long low_speed_limit; /* bytes/second */
   long low_speed_time; /* number of seconds */
+  long maxdownload; /* hard limit on maximum size of download */
   int set_resume_from; /* continue [ftp] transfer from here */
   char *cookie; /* HTTP cookie string to send */
   struct curl_slist *headers; /* linked list of extra headers */
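
For completeness, using the new option would look roughly like this. Note
that the diff above doesn't include the include/curl/curl.h hunk that
declares CURLOPT_MAXDOWNLOAD in the CURLoption enum, so that part is
assumed here:

CURL *curl = curl_easy_init();
if(curl) {
  curl_easy_setopt(curl, CURLOPT_URL, "http://192.168.0.1:8180/");

  /* hard-cap the download at 16 KB; when the Content-Length exceeds
     the limit, the transfer.c hunk above turns the transfer into a
     ranged request ("0-<maxdownload>") */
  curl_easy_setopt(curl, CURLOPT_MAXDOWNLOAD, 16384L);

  curl_easy_perform(curl);
  curl_easy_cleanup(curl);
}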
