Support resumption of partial downloads
* Adds support only for the Curl backend, which is the default anyway.

* In order to distinguish a file that has been fully downloaded (but not yet
used) and should therefore be hash-checked from one that still needs more
data, store partials with a .partial extension (sketched after this list).

* Adds a simple http server to rustup-mock, to allow the download module
to be properly tested. It's not clear how to easily emulate a server
that stops halfway through a response without one. The tests for the overall
download-resumption functionality should be fairly reusable if we migrate
to another download solution in the future (e.g. in rust-lang#993).

* Don't bother with resumption for metadata files, since they're likely
to go out of date anyway.
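
To make the .partial convention concrete, here is a minimal sketch of the naming
scheme described above. It is not part of this commit, and the helper names
partial_path, resume_offset and finalize_download are hypothetical:

    use std::fs;
    use std::path::{Path, PathBuf};

    // "<target>.partial" holds data that may still be growing; "<target>" only
    // ever appears once the download has completed and passed its hash check.
    fn partial_path(target: &Path) -> PathBuf {
        let mut name = target.as_os_str().to_owned();
        name.push(".partial");
        PathBuf::from(name)
    }

    // Hypothetical caller logic: a .partial file is resumed from its current
    // length, anything else starts from scratch.
    fn resume_offset(target: &Path) -> std::io::Result<u64> {
        match fs::metadata(partial_path(target)) {
            Ok(meta) => Ok(meta.len()),
            Err(_) => Ok(0),
        }
    }

    fn finalize_download(target: &Path) -> std::io::Result<()> {
        // Rename only after verification, so the final name always implies a
        // complete, checked file.
        fs::rename(partial_path(target), target)
    }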
jelford committed Mar 20, 2017
1 parent 2ce8d72 commit e5e3a15
Showing 11 changed files with 567 additions and 220 deletions.
38 changes: 38 additions & 0 deletions Cargo.lock


4 changes: 4 additions & 0 deletions src/download/Cargo.toml
@@ -21,6 +21,10 @@ curl = { version = "0.4", optional = true }
lazy_static = { version = "0.2", optional = true }
native-tls = { version = "0.1", optional = true }

[dev-dependencies]
rustup-mock = { path = "../rustup-mock", version = "1.0.0" }
tempdir = "0.3.4"

[dependencies.hyper]
version = "0.9.8"
default-features = false
6 changes: 5 additions & 1 deletion src/download/src/errors.rs
@@ -1,7 +1,11 @@
use std::io;

error_chain! {
links { }

foreign_links { }
foreign_links {
Io(::std::io::Error) #[cfg(unix)];
}

errors {
HttpStatus(e: u32) {
103 changes: 74 additions & 29 deletions src/download/src/lib.rs
@@ -33,47 +33,86 @@ const BACKENDS: &'static [Backend] = &[
Backend::Rustls
];

pub fn download(url: &Url,
callback: &Fn(Event) -> Result<()>)
-> Result<()> {
for &backend in BACKENDS {
match download_with_backend(backend, url, callback) {
Err(Error(ErrorKind::BackendUnavailable(_), _)) => (),
Err(e) => return Err(e),
Ok(()) => return Ok(()),
}
}

Err("no working backends".into())
}

pub fn download_with_backend(backend: Backend,
fn download_with_backend(backend: Backend,
url: &Url,
resume_from: u64,
callback: &Fn(Event) -> Result<()>)
-> Result<()> {
match backend {
Backend::Curl => curl::download(url, callback),
Backend::Curl => curl::download(url, resume_from, callback),
Backend::Hyper => hyper::download(url, callback),
Backend::Rustls => rustls::download(url, callback),
}
}

fn supports_partial_download(backend: &Backend) -> bool {
match backend {
&Backend::Curl => true,
_ => false
}
}

pub fn download_to_path_with_backend(
backend: Backend,
url: &Url,
path: &Path,
resume_from_partial: bool,
callback: Option<&Fn(Event) -> Result<()>>)
-> Result<()>
{
use std::cell::RefCell;
use std::fs::{self, File};
use std::io::Write;
use std::fs::{self, File, OpenOptions};
use std::io::{Read, Write, Seek, SeekFrom};

|| -> Result<()> {
let file = RefCell::new(try!(File::create(&path).chain_err(
|| "error creating file for download")));
let (file, resume_from) = if resume_from_partial && supports_partial_download(&backend) {
let mut possible_partial = OpenOptions::new()
.read(true)
.open(&path);

let downloaded_so_far = if let Ok(mut partial) = possible_partial {
if let Some(cb) = callback {
println!("Reading file in {}", path.display());
let mut buf = vec![0; 1024*1024*10];
let mut downloaded_so_far = 0;
let mut number_of_reads = 0;
loop {
let n = try!(partial.read(&mut buf));
downloaded_so_far += n as u64;
number_of_reads += 1;
if n == 0 {
println!("nothing read after {} reads (accumulated {})", number_of_reads, downloaded_so_far);
break;
}
try!(cb(Event::DownloadDataReceived(&buf[..n])));
}
downloaded_so_far
} else {
use std::fs::Metadata;
let file_info = try!(partial.metadata());
file_info.len()
}
} else {
0
};

let mut possible_partial = try!(OpenOptions::new().write(true).create(true).open(&path).chain_err(|| "error opening file for download"));
try!(possible_partial.seek(SeekFrom::End(0)));

(possible_partial, downloaded_so_far)
} else {
println!("Download resume not supported");
(try!(OpenOptions::new()
.write(true)
.create(true)
.open(&path)
.chain_err(|| "error creating file for download")), 0)
};

try!(download_with_backend(backend, url, &|event| {
let file = RefCell::new(file);

try!(download_with_backend(backend, url, resume_from, &|event| {
if let Event::DownloadDataReceived(data) = event {
try!(file.borrow_mut().write_all(data)
.chain_err(|| "unable to write download to disk"));
@@ -89,11 +128,8 @@ pub fn download_to_path_with_backend(

Ok(())
}().map_err(|e| {
if path.is_file() {
// FIXME ignoring compound errors
let _ = fs::remove_file(path);
}

// TODO is there any point clearing up here? What kind of errors will leave us with an unusable partial?
e
})
}
@@ -114,6 +150,7 @@ pub mod curl {
use super::Event;

pub fn download(url: &Url,
resume_from: u64,
callback: &Fn(Event) -> Result<()> )
-> Result<()> {
// Fetch either a cached libcurl handle (which will preserve open
@@ -128,6 +165,12 @@
try!(handle.url(&url.to_string()).chain_err(|| "failed to set url"));
try!(handle.follow_location(true).chain_err(|| "failed to set follow redirects"));

if resume_from > 0 {
try!(handle.range(&(resume_from.to_string() + "-")).chain_err(|| "setting the range-header for download resumption"));
} else {
try!(handle.range("").chain_err(|| "clearing range header"));
}

// Take at most 30s to connect
try!(handle.connect_timeout(Duration::new(30, 0)).chain_err(|| "failed to set connect timeout"));

@@ -154,8 +197,8 @@ pub mod curl {
if let Ok(data) = str::from_utf8(header) {
let prefix = "Content-Length: ";
if data.starts_with(prefix) {
if let Ok(s) = data[prefix.len()..].trim().parse() {
let msg = Event::DownloadContentLengthReceived(s);
if let Ok(s) = data[prefix.len()..].trim().parse::<u64>() {
let msg = Event::DownloadContentLengthReceived(s + resume_from);
match callback(msg) {
Ok(()) => (),
Err(e) => {
@@ -188,10 +231,11 @@ }));
}));
}

// If we didn't get a 200 or 0 ("OK" for files) then return an error
// If we didn't get a 20x or 0 ("OK" for files) then return an error
let code = try!(handle.response_code().chain_err(|| "failed to get response code"));
if code != 200 && code != 0 {
return Err(ErrorKind::HttpStatus(code).into());
match code {
0 | 200...299 => { println!("status code: {}", code)}
_ => { return Err(ErrorKind::HttpStatus(code).into()); }
}

Ok(())
@@ -639,6 +683,7 @@ pub mod curl {
use super::Event;

pub fn download(_url: &Url,
_resume_from: u64,
_callback: &Fn(Event) -> Result<()> )
-> Result<()> {
Err(ErrorKind::BackendUnavailable("curl").into())
87 changes: 87 additions & 0 deletions src/download/tests/download-curl-resume.rs
@@ -0,0 +1,87 @@
#![cfg(feature = "curl-backend")]

extern crate download;
extern crate rustup_mock;
extern crate tempdir;

use std::fs::{self, File};
use std::io::{Read};
use std::path::Path;

use tempdir::TempDir;

use rustup_mock::http_server;

use download::*;

fn setup(test: &Fn(TempDir, http_server::Server) -> ()) {
let tmp = TempDir::new("rustup-download-test-").expect("creating tempdir for test");
let served_dir = &tmp.path().join("test-files");
fs::DirBuilder::new().create(served_dir).expect("setting up a folder to server files from");
let server = http_server::Server::serve_from(served_dir).expect("setting up http server for test");
test(tmp, server);
}

fn file_contents(path: &Path) -> String {
let mut result = String::new();
File::open(&path).unwrap().read_to_string(&mut result).expect("reading test result file");
result
}

#[test]
fn when_download_is_interrupted_partial_file_is_left_on_disk() {
setup(&|tmpdir, mut server| {
let target_path = tmpdir.path().join("downloaded");

server.put_file_from_bytes("test-file", b"12345");

server.stop_after_bytes(3);
download_to_path_with_backend(
Backend::Curl, &server.address().join("test-file").unwrap(), &target_path, true, None)
.expect("Test download failed");

assert_eq!(file_contents(&target_path), "123");
});
}

#[test]
fn download_interrupted_and_resumed() {
setup(&|tmpdir, mut server| {
let target_path = tmpdir.path().join("downloaded");

server.put_file_from_bytes("test-file", b"12345");

server.stop_after_bytes(3);
download_to_path_with_backend(
Backend::Curl, &server.address().join("test-file").unwrap(), &target_path, true, None)
.expect("Test download failed");

server.stop_after_bytes(2);
download_to_path_with_backend(
Backend::Curl, &server.address().join("test-file").unwrap(), &target_path, true, None)
.expect("Test download failed");

assert_eq!(file_contents(&target_path), "12345");
});
}

#[test]
fn resuming_download_with_callback_that_needs_to_read_contents() {
setup(&|tmpdir, mut server| {
let target_path = tmpdir.path().join("downloaded");

server.put_file_from_bytes("test-file", b"12345");

server.stop_after_bytes(3);
download_to_path_with_backend(
Backend::Curl, &server.address().join("test-file").unwrap(), &target_path, true, Some(&|_| {Ok(())}))
.expect("Test download failed");

server.stop_after_bytes(2);
download_to_path_with_backend(
Backend::Curl, &server.address().join("test-file").unwrap(), &target_path, true, Some(&|_| {Ok(())}))
.expect("Test download failed");

assert_eq!(file_contents(&target_path), "12345");
});
}
