Mirror of https://git.tartarus.org/simon/putty.git (synced 2025-01-09 17:38:00 +00:00).
HTTP proxy: support 'Transfer-encoding: chunked'.
I had a report that the Windows free-as-in-beer proxy tool 'FreeProxy' didn't work with the new HTTP proxy code, and it turns out that the first reason why not is that the error-document in its 407 response is sent via chunked transfer encoding, which is to say, instead of an up-front Content-length header, you receive a sequence of chunks each prefixed with a hex length. (In 0.76, before the rewritten proxy support, we never even noticed this because we sent Basic auth details up front in our first attempt, rather than attempting a no-auth connection first and waiting to see what kind of auth the proxy asks us for. So we'd only ever see a 407 if the auth details were refused - and since 0.76 didn't have interactive proxy auth prompts, there was nothing we could do at that point but abort immediately, without attempting to parse the rest of the 407 at all.) Now we spot the Transfer-encoding header and successfully parse chunked transfers. Happily, we don't need to worry about the further transfer-encodings such as 'gzip', because we're not actually _using_ the error document - we only have to skip over it to find the end of the HTTP response. This still doesn't make PuTTY work with FreeProxy, because there are further problems hiding behind that one, which I'll fix in following commits.
This commit is contained in:
parent
445f9de129
commit
5c9a43f478
80
proxy/http.c
80
proxy/http.c
@ -72,7 +72,8 @@ typedef struct HttpProxyNegotiator {
|
||||
uint32_t nonce_count;
|
||||
prompts_t *prompts;
|
||||
int username_prompt_index, password_prompt_index;
|
||||
size_t content_length;
|
||||
size_t content_length, chunk_length;
|
||||
bool chunked_transfer;
|
||||
ProxyNegotiator pn;
|
||||
} HttpProxyNegotiator;
|
||||
|
||||
@ -151,6 +152,7 @@ static void proxy_http_free(ProxyNegotiator *pn)
|
||||
#define HTTP_HEADER_LIST(X) \
|
||||
X(HDR_CONNECTION, "Connection") \
|
||||
X(HDR_CONTENT_LENGTH, "Content-Length") \
|
||||
X(HDR_TRANSFER_ENCODING, "Transfer-Encoding") \
|
||||
X(HDR_PROXY_AUTHENTICATE, "Proxy-Authenticate") \
|
||||
/* end of list */
|
||||
|
||||
@ -460,6 +462,7 @@ static void proxy_http_process_queue(ProxyNegotiator *pn)
|
||||
crReturnV;
|
||||
|
||||
s->content_length = 0;
|
||||
s->chunked_transfer = false;
|
||||
s->connection_close = false;
|
||||
|
||||
/*
|
||||
@ -530,6 +533,23 @@ static void proxy_http_process_queue(ProxyNegotiator *pn)
|
||||
if (!get_token(s))
|
||||
continue;
|
||||
s->content_length = strtoumax(s->token->s, NULL, 10);
|
||||
} else if (hdr == HDR_TRANSFER_ENCODING) {
|
||||
/*
|
||||
* The Transfer-Encoding header value should be a
|
||||
* comma-separated list of keywords including
|
||||
* "chunked", "deflate" and "gzip". We parse it in the
|
||||
* most superficial way, by just looking for "chunked"
|
||||
* and ignoring everything else.
|
||||
*
|
||||
* It's OK to do that because we're not actually
|
||||
* _using_ the error document - we only have to skip
|
||||
* over it to find the end of the HTTP response. So we
|
||||
* don't care if it's gzipped or not.
|
||||
*/
|
||||
while (get_token(s)) {
|
||||
if (!stricmp(s->token->s, "chunked"))
|
||||
s->chunked_transfer = true;
|
||||
}
|
||||
} else if (hdr == HDR_CONNECTION) {
|
||||
if (!get_token(s))
|
||||
continue;
|
||||
@ -584,8 +604,62 @@ static void proxy_http_process_queue(ProxyNegotiator *pn)
|
||||
} while (s->header->len > 0);
|
||||
|
||||
/* Read and ignore the entire response document */
|
||||
crMaybeWaitUntilV(bufchain_try_consume(
|
||||
pn->input, s->content_length));
|
||||
if (!s->chunked_transfer) {
|
||||
/* Simple approach: read exactly Content-Length bytes */
|
||||
crMaybeWaitUntilV(bufchain_try_consume(
|
||||
pn->input, s->content_length));
|
||||
} else {
|
||||
/* Chunked transfer: read a sequence of
|
||||
* <hex length>\r\n<data>\r\n chunks, terminating in one with
|
||||
* zero length */
|
||||
do {
|
||||
/*
|
||||
* Expect a chunk length
|
||||
*/
|
||||
s->chunk_length = 0;
|
||||
while (true) {
|
||||
char c;
|
||||
crMaybeWaitUntilV(bufchain_try_fetch_consume(
|
||||
pn->input, &c, 1));
|
||||
if (c == '\r') {
|
||||
continue;
|
||||
} else if (c == '\n') {
|
||||
break;
|
||||
} else if ('0' <= c && c <= '9') {
|
||||
s->chunk_length = s->chunk_length*16 + (c-'0');
|
||||
} else if ('A' <= c && c <= 'F') {
|
||||
s->chunk_length = s->chunk_length*16 + (c-'A'+10);
|
||||
} else if ('a' <= c && c <= 'f') {
|
||||
s->chunk_length = s->chunk_length*16 + (c-'a'+10);
|
||||
} else {
|
||||
pn->error = dupprintf(
|
||||
"Received bad character 0x%02X in chunk length "
|
||||
"during HTTP chunked transfer encoding",
|
||||
(unsigned)(unsigned char)c);
|
||||
crStopV;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Expect that many bytes of chunked data
|
||||
*/
|
||||
crMaybeWaitUntilV(bufchain_try_consume(
|
||||
pn->input, s->chunk_length));
|
||||
|
||||
/* Now expect \r\n */
|
||||
{
|
||||
char buf[2];
|
||||
crMaybeWaitUntilV(bufchain_try_fetch_consume(
|
||||
pn->input, buf, 2));
|
||||
if (memcmp(buf, "\r\n", 2)) {
|
||||
pn->error = dupprintf(
|
||||
"Missing CRLF after chunk "
|
||||
"during HTTP chunked transfer encoding");
|
||||
crStopV;
|
||||
}
|
||||
}
|
||||
} while (s->chunk_length);
|
||||
}
|
||||
|
||||
if (200 <= s->http_status && s->http_status < 300) {
|
||||
/* Any 2xx HTTP response means we're done */
|
||||
|
Loading…
Reference in New Issue
Block a user