Update to rust 1.34.0 (pantsbuild#7541)
illicitonion authored Apr 19, 2019
1 parent 9760705 commit 9f3f01a
Showing 20 changed files with 62 additions and 61 deletions.
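Nearly every hunk below makes the same mechanical change: a closure that only forwards its argument to one method, such as |s| s.to_string(), becomes the equivalent function path, such as str::to_string, presumably to satisfy the clippy::redundant_closure family of lints in the clippy that ships alongside Rust 1.34.0 (the #[allow] added in src/rust/engine/src/lib.rs below names that lint). The remaining hunks look like reformatting from the newer rustfmt. A minimal, self-contained sketch of the rewrite, not taken from this repository:

    fn main() {
        let words = ["alpha", "beta"];

        // Closure form, as in the removed lines below:
        let via_closure: Vec<String> = words.iter().map(|s| s.to_string()).collect();

        // Function-path form, as in the added lines below:
        let via_path: Vec<String> = words.iter().map(<&str>::to_string).collect();

        assert_eq!(via_closure, via_path);
    }

Both forms compile to the same behavior; the function path simply drops a closure whose only job is to forward its argument.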
2 changes: 1 addition & 1 deletion rust-toolchain
@@ -1 +1 @@
- 1.33.0
+ 1.34.0
2 changes: 2 additions & 0 deletions src/rust/engine/Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion src/rust/engine/fs/brfs/src/main.rs
@@ -601,7 +601,7 @@ pub fn mount<'a, P: AsRef<Path>>(
// TODO: Work out how to disable caching in the filesystem
let options = ["-o", "ro", "-o", "fsname=brfs", "-o", "noapplexattr"]
.iter()
- .map(|o| o.as_ref())
+ .map(<&str>::as_ref)
.collect::<Vec<&OsStr>>();

debug!("About to spawn_mount with options {:?}", options);
6 changes: 3 additions & 3 deletions src/rust/engine/fs/fs_util/src/main.rs
@@ -418,7 +418,7 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> {
&args
.values_of("globs")
.unwrap()
- .map(|s| s.to_string())
+ .map(str::to_string)
.collect::<Vec<String>>(),
&[],
// By using `Ignore`, we assume all elements of the globs will definitely expand to
@@ -467,7 +467,7 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> {
.collect::<Vec<String>>()
.join("")
})
- .map(|s| s.into_bytes())
+ .map(String::into_bytes)
}),
"recursive-file-list-with-digests" => expand_files(store, digest).map(|maybe_v| {
maybe_v
@@ -477,7 +477,7 @@ fn execute(top_match: &clap::ArgMatches<'_>) -> Result<(), ExitError> {
.collect::<Vec<String>>()
.join("")
})
- .map(|s| s.into_bytes())
+ .map(String::into_bytes)
}),
format => Err(format!(
"Unexpected value of --output-format arg: {}",
2 changes: 1 addition & 1 deletion src/rust/engine/fs/src/glob_matching.rs
@@ -282,7 +282,7 @@ trait GlobMatchingImplementation<E: Display + Send + Sync + 'static>: VFS<E> {
.and_then(move |path_globs| {
let child_globs = path_globs
.into_iter()
- .flat_map(|path_globs| path_globs.into_iter())
+ .flat_map(Vec::into_iter)
.map(|pg| context.expand_single(result.clone(), exclude.clone(), pg))
.collect::<Vec<_>>();
future::join_all(child_globs)
4 changes: 2 additions & 2 deletions src/rust/engine/fs/src/lib.rs
@@ -378,7 +378,7 @@ impl PathGlob {
let mut symbolic_path_parent = symbolic_path;
if !canonical_dir_parent.0.pop() {
let mut symbolic_path = symbolic_path_parent;
- symbolic_path.extend(parts.iter().map(|p| p.as_str()));
+ symbolic_path.extend(parts.iter().map(Pattern::as_str));
return Err(format!(
"Globs may not traverse outside of the buildroot: {:?}",
symbolic_path,
@@ -648,7 +648,7 @@ impl PosixFS {
}

pub fn read_link(&self, link: &Link) -> BoxFuture<PathBuf, io::Error> {
- let link_parent = link.0.parent().map(|p| p.to_owned());
+ let link_parent = link.0.parent().map(Path::to_owned);
let link_abs = self.root.0.join(link.0.as_path()).to_owned();
self
.pool
4 changes: 2 additions & 2 deletions src/rust/engine/fs/src/snapshot.rs
@@ -87,7 +87,7 @@ impl Snapshot {
})
.map(move |path_stats_per_directory| {
let mut path_stats =
- Iterator::flatten(path_stats_per_directory.into_iter().map(|v| v.into_iter()))
+ Iterator::flatten(path_stats_per_directory.into_iter().map(Vec::into_iter))
.collect::<Vec<_>>();
path_stats.sort_by(|l, r| l.path().cmp(&r.path()));
Snapshot { digest, path_stats }
@@ -281,7 +281,7 @@ impl Snapshot {
let unique_count = out_dir
.get_files()
.iter()
- .map(|v| v.get_name())
+ .map(bazel_protos::remote_execution::FileNode::get_name)
.dedup()
.count();
if unique_count != out_dir.get_files().len() {
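The snapshot.rs hunk above and the store.rs hunks below share a flattening idiom: a collection of Vecs is collapsed by mapping each inner Vec to its owning iterator and handing the result to Iterator::flatten. A self-contained sketch using only the standard library, written in the same fully qualified Iterator::flatten(..) style as these files:

    fn main() {
        let nested: Vec<Vec<u32>> = vec![vec![1, 2], vec![3]];
        // Map each inner Vec to an owning iterator, then flatten into one Vec.
        let flat: Vec<u32> = Iterator::flatten(nested.into_iter().map(Vec::into_iter)).collect();
        assert_eq!(flat, vec![1, 2, 3]);
    }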
22 changes: 8 additions & 14 deletions src/rust/engine/fs/src/store.rs
@@ -314,10 +314,12 @@ impl Store {
expanding_futures.push(self.expand_directory(digest));
}
Ok(None) => {
return future::err(format!("Failed to upload digest {:?}: Not found", digest)).to_boxed();
return future::err(format!("Failed to upload digest {:?}: Not found", digest))
.to_boxed();
}
Err(err) => {
return future::err(format!("Failed to upload digest {:?}: {:?}", digest, err)).to_boxed();
return future::err(format!("Failed to upload digest {:?}: {:?}", digest, err))
.to_boxed();
}
};
}
@@ -471,12 +473,7 @@ impl Store {
future::ok(digest_types).to_boxed()
})
.map(|digest_pairs_per_directory| {
- Iterator::flatten(
- digest_pairs_per_directory
- .into_iter()
- .map(|v| v.into_iter()),
- )
- .collect()
+ Iterator::flatten(digest_pairs_per_directory.into_iter().map(Vec::into_iter)).collect()
})
.to_boxed()
}
@@ -579,12 +576,9 @@ impl Store {
.to_boxed()
})
.map(|file_contents_per_directory| {
- let mut vec = Iterator::flatten(
- file_contents_per_directory
- .into_iter()
- .map(|v| v.into_iter()),
- )
- .collect::<Vec<_>>();
+ let mut vec =
+ Iterator::flatten(file_contents_per_directory.into_iter().map(Vec::into_iter))
+ .collect::<Vec<_>>();
vec.sort_by(|l, r| l.path.cmp(&r.path));
vec
})
2 changes: 1 addition & 1 deletion src/rust/engine/graph/src/lib.rs
@@ -656,7 +656,7 @@ impl<N: Node> Graph<N> {
.pg
.neighbors_directed(entry_id, Direction::Outgoing)
.filter_map(|dep_id| inner.entry_for_id(dep_id))
- .map(|entry| entry.generation())
+ .map(Entry::generation)
.collect();
(
inner.entry_for_id(entry_id).cloned(),
@@ -14,8 +14,13 @@ pub fn verify_directory_canonical(directory: &remote_execution::Directory) -> Re
let file_names: HashSet<&str> = directory
.get_files()
.iter()
- .map(|file| file.get_name())
- .chain(directory.get_directories().iter().map(|dir| dir.get_name()))
+ .map(remote_execution::FileNode::get_name)
+ .chain(
+ directory
+ .get_directories()
+ .iter()
+ .map(remote_execution::DirectoryNode::get_name),
+ )
.collect();
if file_names.len() != directory.get_files().len() + directory.get_directories().len() {
return Err(format!(
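The hunk above collects both file names and directory names into a single HashSet and compares its size against the total count to detect duplicate names. A small sketch of that idiom with plain strings (a hypothetical helper, not pants code):

    use std::collections::HashSet;

    // Returns true when any name occurs more than once.
    fn has_duplicates(names: &[&str]) -> bool {
        let unique: HashSet<&str> = names.iter().cloned().collect();
        unique.len() != names.len()
    }

    fn main() {
        assert!(has_duplicates(&["lib.rs", "main.rs", "lib.rs"]));
        assert!(!has_duplicates(&["lib.rs", "main.rs"]));
    }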
2 changes: 1 addition & 1 deletion src/rust/engine/process_execution/src/local.rs
@@ -57,7 +57,7 @@ impl CommandRunner {
dir_glob.push("/**");
vec![dir, dir_glob]
})
- .chain(output_file_paths.into_iter().map(|p| p.into_os_string()))
+ .chain(output_file_paths.into_iter().map(PathBuf::into_os_string))
.map(|s| {
s.into_string()
.map_err(|e| format!("Error stringifying output paths: {:?}", e))
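The local.rs hunk above converts each output PathBuf into a String, surfacing non-UTF-8 paths as errors rather than converting them lossily. A self-contained sketch of that conversion chain (standard library only):

    use std::path::PathBuf;

    fn main() -> Result<(), String> {
        let paths = vec![PathBuf::from("out/report.txt")];
        let strings = paths
            .into_iter()
            .map(PathBuf::into_os_string)
            .map(|s| {
                // OsString::into_string hands the OsString back on failure.
                s.into_string()
                    .map_err(|e| format!("Error stringifying output paths: {:?}", e))
            })
            .collect::<Result<Vec<String>, String>>()?;
        assert_eq!(strings, vec!["out/report.txt".to_string()]);
        Ok(())
    }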
29 changes: 14 additions & 15 deletions src/rust/engine/process_execution/src/remote.rs
@@ -242,7 +242,7 @@ impl super::CommandRunner for CommandRunner {
// maybe the delay here should be the min of remaining time and the backoff period
Delay::new_handle(
Instant::now() + Duration::from_millis(backoff_period),
- futures_timer_thread.with(|thread| thread.handle()),
+ futures_timer_thread.with(futures_timer::HelperThread::handle),
)
.map_err(move |e| {
format!(
@@ -501,18 +501,17 @@ impl CommandRunner {
)))
.to_boxed();
}
- let digest =
- Digest(
- try_future!(Fingerprint::from_hex_string(parts[1]).map_err(|e| {
- ExecutionError::Fatal(format!("Bad digest in missing blob: {}: {}", parts[1], e))
- })),
- try_future!(parts[2]
- .parse::<usize>()
- .map_err(|e| ExecutionError::Fatal(format!(
- "Missing blob had bad size: {}: {}",
- parts[2], e
- )))),
- );
+ let digest = Digest(
+ try_future!(Fingerprint::from_hex_string(parts[1]).map_err(|e| {
+ ExecutionError::Fatal(format!("Bad digest in missing blob: {}: {}", parts[1], e))
+ })),
+ try_future!(parts[2]
+ .parse::<usize>()
+ .map_err(|e| ExecutionError::Fatal(format!(
+ "Missing blob had bad size: {}: {}",
+ parts[2], e
+ )))),
+ );
missing_digests.push(digest);
}
if missing_digests.is_empty() {
@@ -763,7 +762,7 @@ fn make_execute_request(
.iter()
.map(|p| {
p.to_str()
- .map(|s| s.to_owned())
+ .map(str::to_owned)
.ok_or_else(|| format!("Non-UTF8 output file path: {:?}", p))
})
.collect::<Result<Vec<String>, String>>()?;
@@ -775,7 +774,7 @@ fn make_execute_request(
.iter()
.map(|p| {
p.to_str()
- .map(|s| s.to_owned())
+ .map(str::to_owned)
.ok_or_else(|| format!("Non-UTF8 output directory path: {:?}", p))
})
.collect::<Result<Vec<String>, String>>()?;
4 changes: 2 additions & 2 deletions src/rust/engine/process_executor/src/main.rs
@@ -187,7 +187,7 @@ fn main() {
let argv: Vec<String> = args
.values_of("argv")
.unwrap()
- .map(|v| v.to_string())
+ .map(str::to_string)
.collect();
let env: BTreeMap<String, String> = match args.values_of("env") {
Some(values) => values
@@ -254,7 +254,7 @@ fn main() {
// TODO: Take a command line arg.
fs::BackoffConfig::new(Duration::from_secs(1), 1.2, Duration::from_secs(20)).unwrap(),
3,
- timer_thread.with(|t| t.handle()),
+ timer_thread.with(futures_timer::HelperThread::handle),
)
}
(None, None) => fs::Store::local_only(local_store_path, pool.clone()),
2 changes: 1 addition & 1 deletion src/rust/engine/src/context.rs
@@ -125,7 +125,7 @@ impl Core {
fs::BackoffConfig::new(Duration::from_millis(10), 1.0, Duration::from_millis(10))
.unwrap(),
remote_store_rpc_retries,
- futures_timer_thread2.with(|t| t.handle()),
+ futures_timer_thread2.with(futures_timer::HelperThread::handle),
)
}
})
4 changes: 2 additions & 2 deletions src/rust/engine/src/externs.rs
@@ -608,7 +608,7 @@ pub struct Buffer {

impl Buffer {
pub fn to_bytes(&self) -> Vec<u8> {
- with_vec(self.bytes_ptr, self.bytes_len as usize, |vec| vec.clone())
+ with_vec(self.bytes_ptr, self.bytes_len as usize, Vec::clone)
}

pub fn to_os_string(&self) -> OsString {
@@ -637,7 +637,7 @@ pub struct BufferBuffer {
impl BufferBuffer {
pub fn to_bytes_vecs(&self) -> Vec<Vec<u8>> {
with_vec(self.bufs_ptr, self.bufs_len as usize, |vec| {
- vec.iter().map(|b| b.to_bytes()).collect()
+ vec.iter().map(Buffer::to_bytes).collect()
})
}

1 change: 1 addition & 0 deletions src/rust/engine/src/lib.rs
@@ -241,6 +241,7 @@ pub extern "C" fn scheduler_create(
string: type_string,
bytes: type_bytes,
};
+ #[allow(clippy::redundant_closure)] // I couldn't find an easy way to remove this closure.
let mut tasks = with_tasks(tasks_ptr, |tasks| tasks.clone());
tasks.intrinsics_set(&types);
// Allocate on the heap via `Box` and return a raw pointer to the boxed value.
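The #[allow(clippy::redundant_closure)] added above keeps one closure that the lint flags but that was not easy to remove. In general the suggested function-path replacement does not always compile, because a closure body benefits from deref coercions at the call site that a bare function path does not get. A hypothetical illustration of that failure mode using standard-library types (not this repository's code):

    fn main() {
        let strings = vec![String::from("  hello  ")];

        // Compiles: `s` is `&String`, and the method call auto-derefs to `str::trim`.
        let trimmed: Vec<&str> = strings.iter().map(|s| s.trim()).collect();

        // Would not compile: `str::trim` expects `&str`, but the iterator yields
        // `&String`, and no deref coercion is applied to a bare function path.
        // let trimmed: Vec<&str> = strings.iter().map(str::trim).collect();

        assert_eq!(trimmed, vec!["hello"]);
    }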
20 changes: 10 additions & 10 deletions src/rust/engine/src/nodes.rs
@@ -605,8 +605,8 @@ impl DownloadedFile {
) -> BoxFuture<fs::Snapshot, String> {
let file_name = try_future!(url
.path_segments()
- .and_then(|ps| ps.last())
- .map(|f| f.to_owned())
+ .and_then(Iterator::last)
+ .map(str::to_owned)
.ok_or_else(|| format!("Error getting the file name from the parsed URL: {}", url)));

core
@@ -1052,14 +1052,14 @@ impl Node for NodeKey {
};
let context2 = context.clone();
match self {
- NodeKey::DigestFile(n) => n.run(context).map(|v| v.into()).to_boxed(),
- NodeKey::DownloadedFile(n) => n.run(context).map(|v| v.into()).to_boxed(),
- NodeKey::ExecuteProcess(n) => n.run(context).map(|v| v.into()).to_boxed(),
- NodeKey::ReadLink(n) => n.run(context).map(|v| v.into()).to_boxed(),
- NodeKey::Scandir(n) => n.run(context).map(|v| v.into()).to_boxed(),
- NodeKey::Select(n) => n.run(context).map(|v| v.into()).to_boxed(),
- NodeKey::Snapshot(n) => n.run(context).map(|v| v.into()).to_boxed(),
- NodeKey::Task(n) => n.run(context).map(|v| v.into()).to_boxed(),
+ NodeKey::DigestFile(n) => n.run(context).map(NodeResult::from).to_boxed(),
+ NodeKey::DownloadedFile(n) => n.run(context).map(NodeResult::from).to_boxed(),
+ NodeKey::ExecuteProcess(n) => n.run(context).map(NodeResult::from).to_boxed(),
+ NodeKey::ReadLink(n) => n.run(context).map(NodeResult::from).to_boxed(),
+ NodeKey::Scandir(n) => n.run(context).map(NodeResult::from).to_boxed(),
+ NodeKey::Select(n) => n.run(context).map(NodeResult::from).to_boxed(),
+ NodeKey::Snapshot(n) => n.run(context).map(NodeResult::from).to_boxed(),
+ NodeKey::Task(n) => n.run(context).map(NodeResult::from).to_boxed(),
}
.inspect(move |_: &NodeResult| {
if let Some((node_name, start_timestamp)) = node_name_and_start_timestamp {
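In the nodes.rs hunk above, |v| v.into() becomes NodeResult::from rather than a bare Into::into, presumably because the closure relied on inference for the destination type, which the path form instead names through the target's From impl. A small sketch of the same trade-off with standard-library types (not pants code):

    fn main() {
        let nums: Vec<u8> = vec![1, 2, 3];

        // Closure form: the target type of `.into()` is inferred from the binding.
        let via_into: Vec<u32> = nums.iter().cloned().map(|v| v.into()).collect();

        // Function-path form: the conversion is named via the target type's From impl.
        let via_from: Vec<u32> = nums.iter().cloned().map(u32::from).collect();

        assert_eq!(via_into, via_from);
    }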
2 changes: 1 addition & 1 deletion src/rust/engine/src/rule_graph.rs
@@ -290,7 +290,7 @@ impl<'t> GraphMaker<'t> {
.collect();
let unfulfillable_discovered_during_construction: HashSet<_> = full_unfulfillable_rules
.keys()
- .filter_map(|f| f.task_rule())
+ .filter_map(EntryWithDeps::task_rule)
.cloned()
.collect();
self
2 changes: 1 addition & 1 deletion src/rust/engine/src/scheduler.rs
@@ -302,7 +302,7 @@ impl Scheduler {
.roots
.clone()
.into_iter()
- .map(|s| s.into())
+ .map(NodeKey::from)
.collect();

// Lock the display for the remainder of the execution, and grab a reference to it.
2 changes: 1 addition & 1 deletion src/rust/engine/testutil/src/lib.rs
@@ -35,7 +35,7 @@ pub mod data;
pub mod file;

pub fn owned_string_vec(args: &[&str]) -> Vec<String> {
- args.iter().map(|s| s.to_string()).collect()
+ args.iter().map(<&str>::to_string).collect()
}

pub fn as_byte_owned_vec(str: &str) -> Vec<u8> {
