[sr-dev] git:master:f12e03e1: curl Enforce max data size when downloading data. Stop re-allocating memory for data we do not bother with.
Olle E. Johansson
oej at edvina.net
Mon Nov 2 11:45:23 CET 2015
Module: kamailio
Branch: master
Commit: f12e03e1c171a723097eb14f1074d705ed1404e4
URL: https://github.com/kamailio/kamailio/commit/f12e03e1c171a723097eb14f1074d705ed1404e4
Author: Olle E. Johansson <oej at edvina.net>
Committer: Olle E. Johansson <oej at edvina.net>
Date: 2015-11-02T11:44:49+01:00
curl Enforce max data size when downloading data. Stop re-allocating memory for data we do not bother with.
---
Modified: modules/curl/curl.h
Modified: modules/curl/functions.c
---
Diff: https://github.com/kamailio/kamailio/commit/f12e03e1c171a723097eb14f1074d705ed1404e4.diff
Patch: https://github.com/kamailio/kamailio/commit/f12e03e1c171a723097eb14f1074d705ed1404e4.patch
---
diff --git a/modules/curl/curl.h b/modules/curl/curl.h
index b83438c..8f5aba9 100644
--- a/modules/curl/curl.h
+++ b/modules/curl/curl.h
@@ -55,7 +55,8 @@ typedef struct {
char *buf;
size_t curr_size;
size_t pos;
-} http_res_stream_t;
+ size_t max_size;
+} curl_res_stream_t;
/*! Predefined connection objects */
@@ -76,7 +77,7 @@ typedef struct _curl_con
unsigned int port; /*!< The port to connect to */
int timeout; /*!< Timeout for this connection */
long maxdatasize; /*!< Maximum data download on GET or POST */
- http_res_stream_t *stream; /*!< Curl stream */
+ curl_res_stream_t *stream; /*!< Curl stream */
struct _curl_con *next; /*!< next connection */
char redirecturl[512]; /*!< Last redirect URL - to use for $curlredirect(curlcon) pv */
} curl_con_t;
diff --git a/modules/curl/functions.c b/modules/curl/functions.c
index 19908c6..6925c71 100644
--- a/modules/curl/functions.c
+++ b/modules/curl/functions.c
@@ -60,26 +60,26 @@ static int curL_query_url(struct sip_msg* _m, char* _url, char* _dst, const char
*/
size_t write_function( void *ptr, size_t size, size_t nmemb, void *stream_ptr)
{
- /* A question here is if we can somehow signal maxdatasize and stop filling
- buffers at maxdatasize - we don't need any more. Or just ignore and stop
- allocating pkg memory at that point. A good todo.
- */
- http_res_stream_t *stream = (http_res_stream_t *) stream_ptr;
-
- stream->buf = (char *) pkg_realloc(stream->buf, stream->curr_size +
- (size * nmemb) + 1);
-
- if (stream->buf == NULL) {
- LM_ERR("cannot allocate memory for stream\n");
- return CURLE_WRITE_ERROR;
- }
+ curl_res_stream_t *stream = (curl_res_stream_t *) stream_ptr;
+
+
+ if (stream->max_size == 0 || stream->curr_size < stream->max_size) {
+ stream->buf = (char *) pkg_realloc(stream->buf, stream->curr_size + (size * nmemb) + 1);
- memcpy(&stream->buf[stream->pos], (char *) ptr, (size * nmemb));
+ if (stream->buf == NULL) {
+ LM_ERR("cannot allocate memory for stream\n");
+ return CURLE_WRITE_ERROR;
+ }
- stream->curr_size += ((size * nmemb) + 1);
- stream->pos += (size * nmemb);
+ memcpy(&stream->buf[stream->pos], (char *) ptr, (size * nmemb));
- stream->buf[stream->pos + 1] = '\0';
+ stream->curr_size += ((size * nmemb) + 1);
+ stream->pos += (size * nmemb);
+
+ stream->buf[stream->pos + 1] = '\0';
+ } else {
+ LM_DBG("****** ##### CURL Max datasize exceeded: max %u current %u\n", (unsigned int) stream->max_size, (unsigned int)stream->curr_size);
+ }
return size * nmemb;
}
@@ -93,7 +93,7 @@ static int curL_query_url(struct sip_msg* _m, char* _url, char* _dst, const char
CURLcode res;
str value;
char *url, *at = NULL;
- http_res_stream_t stream;
+ curl_res_stream_t stream;
long stat;
pv_spec_t *dst;
pv_value_t val;
@@ -101,7 +101,8 @@ static int curL_query_url(struct sip_msg* _m, char* _url, char* _dst, const char
double total_time;
struct curl_slist *headerlist = NULL;
- memset(&stream, 0, sizeof(http_res_stream_t));
+ memset(&stream, 0, sizeof(curl_res_stream_t));
+ stream.max_size = (size_t) maxdatasize;
value.s = _url;
value.len = strlen(_url);
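
For readers outside the module, the pattern introduced in write_function() above can be illustrated with a small standalone libcurl program. This is only a sketch under stated assumptions: the names capped_stream_t and capped_write are made up for the example, plain realloc() stands in for the module's pkg_realloc(), and the hard-coded 16 KB cap plays the role of the connection's maxdatasize setting.

/* Sketch of a size-capped libcurl write callback: once the stored
 * amount reaches max_size, incoming data is no longer appended,
 * but the callback still reports the full chunk as handled so the
 * transfer itself is not aborted. Illustrative names only. */
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <curl/curl.h>

typedef struct {
	char   *buf;
	size_t  len;       /* bytes stored so far */
	size_t  max_size;  /* 0 means "no limit"  */
} capped_stream_t;

static size_t capped_write(void *ptr, size_t size, size_t nmemb, void *userdata)
{
	capped_stream_t *st = (capped_stream_t *) userdata;
	size_t chunk = size * nmemb;

	if (st->max_size == 0 || st->len < st->max_size) {
		char *newbuf = realloc(st->buf, st->len + chunk + 1);
		if (newbuf == NULL) {
			/* returning something other than size * nmemb
			 * makes libcurl abort the transfer */
			return 0;
		}
		st->buf = newbuf;
		memcpy(st->buf + st->len, ptr, chunk);
		st->len += chunk;
		st->buf[st->len] = '\0';
	}
	/* data beyond max_size is silently dropped, but the transfer
	 * is allowed to run to completion */
	return chunk;
}

int main(void)
{
	capped_stream_t st = { NULL, 0, 16 * 1024 }; /* cap at 16 KB */
	CURL *h = curl_easy_init();
	if (h == NULL)
		return 1;

	curl_easy_setopt(h, CURLOPT_URL, "http://example.com/");
	curl_easy_setopt(h, CURLOPT_WRITEFUNCTION, capped_write);
	curl_easy_setopt(h, CURLOPT_WRITEDATA, &st);

	CURLcode res = curl_easy_perform(h);
	if (res == CURLE_OK)
		printf("stored %zu bytes (cap %zu)\n", st.len, st.max_size);

	curl_easy_cleanup(h);
	free(st.buf);
	return 0;
}

As in the committed code, the callback keeps returning size * nmemb after the cap is reached, so libcurl completes the download and the excess data is simply not stored. Because the limit is checked before appending, the buffer can overshoot the cap by at most one chunk.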