fix(lib): Support large body uploads with repeated read callbacks
When uploading larger amounts of data, cURL calls the read callback repeatedly until it has received all of the expected data. Previously, if the data to upload exceeded the size of the initial buffer provided by cURL, the write (and thus the request) would fail. This changes the logic to write the data in chunks of a size acceptable to cURL.
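The library-side change (the second modified file) is not included in this excerpt. As a minimal sketch of the chunked read-callback approach described above, with a hypothetical `Body` type standing in for the request's upload state:

    use std::cmp;

    /// Hypothetical upload state; the real type in src/lib.rs is not shown here.
    struct Body {
        data: Vec<u8>,
        offset: usize,
    }

    impl Body {
        /// Called from cURL's read callback, possibly many times per request.
        /// Copies at most `buf.len()` bytes (the buffer cURL provides for this
        /// call) and returns how many were written; 0 signals end of data.
        fn read_chunk(&mut self, buf: &mut [u8]) -> usize {
            let remaining = &self.data[self.offset..];
            let n = cmp::min(buf.len(), remaining.len());
            buf[..n].copy_from_slice(&remaining[..n]);
            self.offset += n;
            n
        }
    }

    fn main() {
        // 2^16 + 1 bytes of data against a 2^16-byte buffer forces two callback
        // invocations (65536 bytes, then 1 byte), mirroring the test below.
        let mut body = Body { data: vec![0u8; 65537], offset: 0 };
        let mut buf = [0u8; 1 << 16];
        let mut total = 0;
        loop {
            let n = body.read_chunk(&mut buf);
            if n == 0 { break; }
            total += n;
        }
        assert_eq!(total, 65537);
    }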
parent a18dfc5a5e
commit 67bfba446d
2 changed files with 42 additions and 12 deletions
src/tests.rs | 18
@@ -114,6 +114,24 @@ fn test_basic_auth() {
     assert!(response.is_success(), "authorized request should succeed");
 }
 
+#[test]
+fn test_large_body() {
+    // By default cURL buffers seem to be 2^16 bytes in size. The test
+    // size is therefore 2^16+1.
+    const BODY_SIZE: usize = 65537;
+
+    let resp = Request::post("http://127.0.0.1:4662/post")
+        .body("application/octet-stream", &[0; BODY_SIZE])
+        .send().expect("sending request")
+        .as_json::<Value>().expect("JSON deserialisation");
+
+    // httpbin returns the uploaded data as a string in the `data`
+    // field.
+    let data = resp.body.get("data").unwrap().as_str().unwrap();
+
+    assert_eq!(BODY_SIZE, data.len(), "uploaded data length should be correct");
+}
+
 // Tests for various other features.
 
 #[test]