Provide a way to detect type of manifest files and general refactor for workspace builds (FuelLabs#3129)

closes FuelLabs#3127.
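
For context, here is a minimal sketch of how the new `ManifestFile` enum introduced in this commit might be consumed by a caller. The `forc_pkg::manifest` import path and the `manifest_kind` helper are illustrative assumptions, not part of the change itself:

```rust
use std::path::Path;

use anyhow::Result;
use forc_pkg::manifest::ManifestFile; // import path assumed; actual re-exports may differ

// Hypothetical helper: probe a directory and report which kind of manifest it holds.
fn manifest_kind(dir: &Path) -> Result<&'static str> {
    match ManifestFile::from_dir(dir)? {
        ManifestFile::Package(_) => Ok("package"),
        ManifestFile::Workspace(_) => Ok("workspace"),
    }
}
```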
kayagokalp authored Oct 26, 2022
1 parent f63973a commit de60a83
Showing 6 changed files with 157 additions and 83 deletions.
49 changes: 47 additions & 2 deletions forc-pkg/src/manifest.rs
@@ -13,6 +13,37 @@ use sway_core::{language::parsed::TreeType, parse};
pub use sway_types::ConfigTimeConstant;
use sway_utils::constants;

pub enum ManifestFile {
Package(Box<PackageManifestFile>),
Workspace(WorkspaceManifestFile),
}

impl ManifestFile {
/// Returns a `PackageManifestFile` if the path is within a package directory, otherwise
/// returns a `WorkspaceManifestFile` if within a workspace directory.
pub fn from_dir(manifest_dir: &Path) -> Result<Self> {
if let Ok(package_manifest) = PackageManifestFile::from_dir(manifest_dir) {
Ok(ManifestFile::Package(Box::new(package_manifest)))
} else if let Ok(workspace_manifest) = WorkspaceManifestFile::from_dir(manifest_dir) {
Ok(ManifestFile::Workspace(workspace_manifest))
} else {
bail!("Cannot find a valid `Forc.toml` at {:?}", manifest_dir)
}
}

/// Returns a `PackageManifestFile` if the path points to a package manifest, otherwise
/// returns a `WorkspaceManifestFile` if it points to a workspace manifest.
pub fn from_file(path: PathBuf) -> Result<Self> {
if let Ok(package_manifest) = PackageManifestFile::from_file(path.clone()) {
Ok(ManifestFile::Package(Box::new(package_manifest)))
} else if let Ok(workspace_manifest) = WorkspaceManifestFile::from_file(path.clone()) {
Ok(ManifestFile::Workspace(workspace_manifest))
} else {
bail!("Cannot find a valid `Forc.toml` at {:?}", path)
}
}
}

type PatchMap = BTreeMap<String, Dependency>;

/// A [PackageManifest] that was deserialized from a file at a particular path.
@@ -246,14 +277,21 @@ impl PackageManifest {
/// implicitly. In this case, the git tag associated with the version of this crate is used to
/// specify the pinned commit at which we fetch `std`.
pub fn from_file(path: &Path) -> Result<Self> {
// While creating a `ManifestFile` we need to check whether the given path corresponds to a
// package or a workspace. While doing so, warnings should only be printed if the given
// file actually parses as this manifest type, so that warnings are emitted only for the
// correct type of manifest (see the sketch after this diff).
let mut warnings = vec![];
let manifest_str = std::fs::read_to_string(path)
.map_err(|e| anyhow!("failed to read manifest at {:?}: {}", path, e))?;
let toml_de = &mut toml::de::Deserializer::new(&manifest_str);
let mut manifest: Self = serde_ignored::deserialize(toml_de, |path| {
let warning = format!(" WARNING! unused manifest key: {}", path);
println_yellow_err(&warning);
warnings.push(warning);
})
.map_err(|e| anyhow!("failed to parse manifest: {}.", e))?;
for warning in warnings {
println_yellow_err(&warning);
}
manifest.implicitly_include_std_if_missing();
manifest.implicitly_include_default_build_profiles_if_missing();
manifest.validate()?;
@@ -569,14 +607,21 @@ impl WorkspaceManifestFile {
impl WorkspaceManifest {
/// Given a path to a `Forc.toml`, read it and construct a `WorkspaceManifest`.
pub fn from_file(path: &Path) -> Result<Self> {
// While creating a `ManifestFile` we need to check whether the given path corresponds to a
// package or a workspace. While doing so, warnings should only be printed if the given
// file actually parses as this manifest type, so that warnings are emitted only for the
// correct type of manifest.
let mut warnings = vec![];
let manifest_str = std::fs::read_to_string(path)
.map_err(|e| anyhow!("failed to read manifest at {:?}: {}", path, e))?;
let toml_de = &mut toml::de::Deserializer::new(&manifest_str);
let manifest: Self = serde_ignored::deserialize(toml_de, |path| {
let warning = format!(" WARNING! unused manifest key: {}", path);
println_yellow_err(&warning);
warnings.push(warning);
})
.map_err(|e| anyhow!("failed to parse manifest: {}.", e))?;
for warning in warnings {
println_yellow_err(&warning);
}
Ok(manifest)
}

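As the comment in `PackageManifest::from_file` notes, unused-key warnings are now collected during deserialization and printed only after the parse succeeds, so probing a file as the wrong manifest type stays silent. A minimal sketch of that pattern, assuming the `toml` 0.5-style `Deserializer::new` API and an illustrative `ExampleManifest` struct:

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct ExampleManifest {
    // Illustrative field only.
    name: String,
}

fn parse_with_deferred_warnings(manifest_str: &str) -> anyhow::Result<ExampleManifest> {
    let mut warnings = Vec::new();
    let toml_de = &mut toml::de::Deserializer::new(manifest_str);
    let parsed: ExampleManifest = serde_ignored::deserialize(toml_de, |path| {
        // Record unused keys instead of printing immediately; if the parse
        // fails, nothing is ever emitted for this candidate manifest type.
        warnings.push(format!("WARNING! unused manifest key: {}", path));
    })?;
    for warning in &warnings {
        eprintln!("{}", warning);
    }
    Ok(parsed)
}
```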
128 changes: 75 additions & 53 deletions forc-pkg/src/pkg.rs
@@ -1,7 +1,8 @@
use crate::{
lock::Lock,
manifest::{
BuildProfile, ConfigTimeConstant, Dependency, PackageManifest, PackageManifestFile,
BuildProfile, ConfigTimeConstant, Dependency, ManifestFile, PackageManifest,
PackageManifestFile,
},
CORE, PRELUDE, STD,
};
@@ -70,13 +71,18 @@ pub struct PinnedId(u64);

/// The result of successfully compiling a package.
#[derive(Debug, Clone)]
pub struct Compiled {
pub struct BuiltPackage {
pub json_abi_program: JsonABIProgram,
pub storage_slots: Vec<StorageSlot>,
pub bytecode: Vec<u8>,
pub tree_type: TreeType,
}

pub enum Built {
Package(BuiltPackage),
Workspace,
}

/// A package uniquely identified by name along with its source.
#[derive(Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd, Deserialize, Serialize)]
pub struct Pkg {
@@ -1780,7 +1786,7 @@ pub fn sway_build_config(
/// then the std prelude will also be added.
pub fn dependency_namespace(
lib_namespace_map: &HashMap<NodeIx, namespace::Module>,
compiled_contract_deps: &HashMap<NodeIx, Compiled>,
compiled_contract_deps: &HashMap<NodeIx, BuiltPackage>,
graph: &Graph,
node: NodeIx,
constants: BTreeMap<String, ConfigTimeConstant>,
@@ -1932,7 +1938,7 @@ pub fn compile(
build_profile: &BuildProfile,
namespace: namespace::Module,
source_map: &mut SourceMap,
) -> Result<(Compiled, Option<namespace::Root>)> {
) -> Result<(BuiltPackage, Option<namespace::Root>)> {
// Time the given expression and print the result if `build_config.time_phases` is true.
macro_rules! time_expr {
($description:expr, $expression:expr) => {{
@@ -1995,13 +2001,13 @@ pub fn compile(
print_on_success_library(terse_mode, &pkg.name, &ast_res.warnings);
let bytecode = vec![];
let lib_namespace = typed_program.root.namespace.clone();
let compiled = Compiled {
let built_package = BuiltPackage {
json_abi_program,
storage_slots,
bytecode,
tree_type,
};
return Ok((compiled, Some(lib_namespace.into())));
return Ok((built_package, Some(lib_namespace.into())));
}
// For all other program types, we'll compile the bytecode.
TreeType::Contract | TreeType::Predicate | TreeType::Script => {}
@@ -2020,13 +2026,13 @@
Some(CompiledBytecode(bytes)) if bc_res.errors.is_empty() => {
print_on_success(terse_mode, &pkg.name, &bc_res.warnings, &tree_type);
let bytecode = bytes;
let compiled = Compiled {
let built_package = BuiltPackage {
json_abi_program,
storage_slots,
bytecode,
tree_type,
};
Ok((compiled, None))
Ok((built_package, None))
}
_ => fail(&bc_res.warnings, &bc_res.errors),
}
@@ -2085,20 +2091,22 @@ pub struct BuildOptions {
}

/// The suffix that helps identify the file which contains the hash of the binary file created when
/// scripts are built.
pub const SWAY_BIN_HASH_SUFFIX: &str = "-bin-hash";

/// The suffix that helps identify the file which contains the root hash of the binary file created
/// when predicates are built.
pub const SWAY_BIN_ROOT_SUFFIX: &str = "-bin-root";

/// Builds a project with the given `BuildOptions`.
pub fn build_with_options(build_options: BuildOptions) -> Result<Compiled> {
pub fn build_package_with_options(
manifest: &PackageManifestFile,
build_options: BuildOptions,
) -> Result<BuiltPackage> {
let key_debug: String = "debug".to_string();
let key_release: String = "release".to_string();

let BuildOptions {
path,
path: _,
binary_outfile,
debug_outfile,
print_ast,
@@ -2136,17 +2144,6 @@ pub fn build_with_options(build_options: BuildOptions) -> Result<Compiled> {
}
}
}

let this_dir = if let Some(ref path) = path {
PathBuf::from(path)
} else {
std::env::current_dir()?
};

let manifest = PackageManifestFile::from_dir(&this_dir)?;

let plan = BuildPlan::from_lock_and_manifest(&manifest, locked, offline_mode)?;

// Retrieve the specified build profile
let mut profile = manifest
.build_profile(&selected_build_profile)
@@ -2167,11 +2164,13 @@ pub fn build_with_options(build_options: BuildOptions) -> Result<Compiled> {
profile.time_phases |= time_phases;
profile.include_tests |= tests;

let plan = BuildPlan::from_lock_and_manifest(manifest, locked, offline_mode)?;

// Build it!
let (compiled, source_map) = build(&plan, &profile)?;
let (built_package, source_map) = build(&plan, &profile)?;

if let Some(outfile) = binary_outfile {
fs::write(&outfile, &compiled.bytecode)?;
fs::write(&outfile, &built_package.bytecode)?;
}

if let Some(outfile) = debug_outfile {
@@ -2191,25 +2190,25 @@ pub fn build_with_options(build_options: BuildOptions) -> Result<Compiled> {
let bin_path = output_dir
.join(&manifest.project.name)
.with_extension("bin");
fs::write(&bin_path, &compiled.bytecode)?;
if !compiled.json_abi_program.functions.is_empty() {
fs::write(&bin_path, &built_package.bytecode)?;
if !built_package.json_abi_program.functions.is_empty() {
let json_abi_program_stem = format!("{}-abi", manifest.project.name);
let json_abi_program_path = output_dir
.join(&json_abi_program_stem)
.with_extension("json");
let file = File::create(json_abi_program_path)?;
let res = if minify_json_abi {
serde_json::to_writer(&file, &compiled.json_abi_program)
serde_json::to_writer(&file, &built_package.json_abi_program)
} else {
serde_json::to_writer_pretty(&file, &compiled.json_abi_program)
serde_json::to_writer_pretty(&file, &built_package.json_abi_program)
};
res?;
}

info!(" Bytecode size is {} bytes.", compiled.bytecode.len());
info!(" Bytecode size is {} bytes.", built_package.bytecode.len());

// Additional ops required depending on the program type
match compiled.tree_type {
match built_package.tree_type {
TreeType::Contract => {
// For contracts, emit a JSON file with all the initialized storage slots.
let json_storage_slots_stem = format!("{}-storage_slots", manifest.project.name);
@@ -2218,24 +2217,24 @@ pub fn build_with_options(build_options: BuildOptions) -> Result<Compiled> {
.with_extension("json");
let storage_slots_file = File::create(json_storage_slots_path)?;
let res = if minify_json_storage_slots {
serde_json::to_writer(&storage_slots_file, &compiled.storage_slots)
serde_json::to_writer(&storage_slots_file, &built_package.storage_slots)
} else {
serde_json::to_writer_pretty(&storage_slots_file, &compiled.storage_slots)
serde_json::to_writer_pretty(&storage_slots_file, &built_package.storage_slots)
};

res?;
}
TreeType::Predicate => {
// get the root hash of the bytecode for predicates and store the result in a file in the output directory
let root = format!("0x{}", Contract::root_from_code(&compiled.bytecode));
let root = format!("0x{}", Contract::root_from_code(&built_package.bytecode));
let root_file_name = format!("{}{}", &manifest.project.name, SWAY_BIN_ROOT_SUFFIX);
let root_path = output_dir.join(root_file_name);
fs::write(root_path, &root)?;
info!(" Predicate root: {}", root);
}
TreeType::Script => {
// hash the bytecode for scripts and store the result in a file in the output directory
let bytecode_hash = format!("0x{}", fuel_crypto::Hasher::hash(&compiled.bytecode));
let bytecode_hash = format!("0x{}", fuel_crypto::Hasher::hash(&built_package.bytecode));
let hash_file_name = format!("{}{}", &manifest.project.name, SWAY_BIN_HASH_SUFFIX);
let hash_path = output_dir.join(hash_file_name);
fs::write(hash_path, &bytecode_hash)?;
@@ -2244,26 +2243,49 @@ pub fn build_with_options(build_options: BuildOptions) -> Result<Compiled> {
_ => (),
}

Ok(compiled)
Ok(built_package)
}

/// Returns the ContractId of a compiled contract with specified `salt`.
fn contract_id(compiled: &Compiled) -> ContractId {
/// Builds a project with the given `BuildOptions`.
pub fn build_with_options(build_options: BuildOptions) -> Result<Built> {
let path = &build_options.path;

let this_dir = if let Some(ref path) = path {
PathBuf::from(path)
} else {
std::env::current_dir()?
};

let manifest_file = ManifestFile::from_dir(&this_dir)?;
match manifest_file {
ManifestFile::Package(package_manifest) => {
let built_package = build_package_with_options(&package_manifest, build_options)?;
Ok(Built::Package(built_package))
}
ManifestFile::Workspace(_) => bail!("Workspace building is not supported"),
}
}

/// Returns the ContractId of a built contract with the specified `salt`.
fn contract_id(built_package: &BuiltPackage) -> ContractId {
// Construct the contract ID
let contract = Contract::from(compiled.bytecode.clone());
let contract = Contract::from(built_package.bytecode.clone());
let salt = fuel_tx::Salt::new([0; 32]);
let mut storage_slots = compiled.storage_slots.clone();
let mut storage_slots = built_package.storage_slots.clone();
storage_slots.sort();
let state_root = Contract::initial_state_root(storage_slots.iter());
contract.id(&salt, &contract.root(), &state_root)
}

/// Build an entire forc package and return the compiled output.
/// Build an entire forc package and return the built output.
///
/// This compiles all packages (including dependencies) in the order specified by the `BuildPlan`.
///
/// Also returns the resulting `sway_core::SourceMap` which may be useful for debugging purposes.
pub fn build(plan: &BuildPlan, profile: &BuildProfile) -> anyhow::Result<(Compiled, SourceMap)> {
pub fn build(
plan: &BuildPlan,
profile: &BuildProfile,
) -> anyhow::Result<(BuiltPackage, SourceMap)> {
//TODO remove once type engine isn't global anymore.
sway_core::clear_lazy_statics();

@@ -2296,44 +2318,44 @@ pub fn build(plan: &BuildPlan, profile: &BuildProfile) -> anyhow::Result<(Compil
}
};
let res = compile(pkg, manifest, profile, dep_namespace, &mut source_map)?;
let (compiled, maybe_namespace) = res;
let (built_package, maybe_namespace) = res;
// If the current node is a contract dependency, collect the contract_id
if plan
.graph()
.edges_directed(node, Direction::Incoming)
.any(|e| e.weight().kind == DepKind::Contract)
{
compiled_contract_deps.insert(node, compiled.clone());
compiled_contract_deps.insert(node, built_package.clone());
}
if let Some(namespace) = maybe_namespace {
lib_namespace_map.insert(node, namespace.into());
}
json_abi_program
.types
.extend(compiled.json_abi_program.types);
.extend(built_package.json_abi_program.types);
json_abi_program
.functions
.extend(compiled.json_abi_program.functions);
.extend(built_package.json_abi_program.functions);
json_abi_program
.logged_types
.extend(compiled.json_abi_program.logged_types);
storage_slots.extend(compiled.storage_slots);
bytecode = compiled.bytecode;
tree_type = Some(compiled.tree_type);
.extend(built_package.json_abi_program.logged_types);
storage_slots.extend(built_package.storage_slots);
bytecode = built_package.bytecode;
tree_type = Some(built_package.tree_type);
source_map.insert_dependency(manifest.dir());
}

standardize_json_abi_types(&mut json_abi_program);

let tree_type =
tree_type.ok_or_else(|| anyhow!("build plan must contain at least one package"))?;
let compiled = Compiled {
let built_package = BuiltPackage {
bytecode,
json_abi_program,
storage_slots,
tree_type,
};
Ok((compiled, source_map))
Ok((built_package, source_map))
}

/// Standardize the JSON ABI data structure by eliminating duplicate types. This is an iterative
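And a sketch of how a caller might handle the new `Built` value returned by `build_with_options`; the `forc_pkg` import paths are assumed, and constructing a `BuildOptions` is left out of scope:

```rust
use anyhow::Result;
use forc_pkg::{build_with_options, BuildOptions, Built}; // paths assumed

// Hypothetical wrapper that reports what kind of build was produced.
fn build_and_report(options: BuildOptions) -> Result<()> {
    match build_with_options(options)? {
        Built::Package(built_package) => {
            println!("Bytecode size is {} bytes.", built_package.bytecode.len());
        }
        Built::Workspace => {
            // Not reachable yet: this commit still bails on workspace manifests.
            println!("Built a workspace.");
        }
    }
    Ok(())
}
```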
(Diffs for the remaining 4 changed files are not shown here.)
