Squashed 'third_party/rules_rust/' content from commit bf59038ca
git-subtree-dir: third_party/rules_rust
git-subtree-split: bf59038cac11798cbaef9f3bf965bad8182b97fa
Signed-off-by: Brian Silverman <bsilver16384@gmail.com>
Change-Id: I5a20e403203d670df467ea97dde9a4ac40339a8d
diff --git a/crate_universe/src/cli.rs b/crate_universe/src/cli.rs
new file mode 100644
index 0000000..2ed27ac
--- /dev/null
+++ b/crate_universe/src/cli.rs
@@ -0,0 +1,42 @@
+//! Command line interface entry points and utilities
+
+mod generate;
+mod query;
+mod splice;
+mod vendor;
+
+use clap::Parser;
+
+use self::generate::GenerateOptions;
+use self::query::QueryOptions;
+use self::splice::SpliceOptions;
+use self::vendor::VendorOptions;
+
+// Entrypoints
+pub use generate::generate;
+pub use query::query;
+pub use splice::splice;
+pub use vendor::vendor;
+
+#[derive(Parser, Debug)]
+#[clap(name = "cargo-bazel", about, version)]
+pub enum Options {
+ /// Generate Bazel Build files from a Cargo manifest.
+ Generate(GenerateOptions),
+
+ /// Splice together disjoint Cargo and Bazel info into a single Cargo workspace manifest.
+ Splice(SpliceOptions),
+
+ /// Query workspace info to determine whether or not a repin is needed.
+ Query(QueryOptions),
+
+ /// Vendor BUILD files to the workspace with either repository definitions or `cargo vendor` generated sources.
+ Vendor(VendorOptions),
+}
+
+// Convenience wrappers to avoid dependencies in the binary
+pub type Result<T> = anyhow::Result<T>;
+
+pub fn parse_args() -> Options {
+ Options::parse()
+}
diff --git a/crate_universe/src/cli/generate.rs b/crate_universe/src/cli/generate.rs
new file mode 100644
index 0000000..67ae868
--- /dev/null
+++ b/crate_universe/src/cli/generate.rs
@@ -0,0 +1,142 @@
+//! The cli entrypoint for the `generate` subcommand
+
+use std::path::PathBuf;
+
+use anyhow::{bail, Result};
+use clap::Parser;
+
+use crate::config::Config;
+use crate::context::Context;
+use crate::lockfile::{is_cargo_lockfile, lock_context, write_lockfile, LockfileKind};
+use crate::metadata::load_metadata;
+use crate::metadata::Annotations;
+use crate::rendering::{write_outputs, Renderer};
+use crate::splicing::SplicingManifest;
+
+/// Command line options for the `generate` subcommand
+#[derive(Parser, Debug)]
+#[clap(about, version)]
+pub struct GenerateOptions {
+ /// The path to a Cargo binary to use for gathering metadata
+ #[clap(long, env = "CARGO")]
+ pub cargo: Option<PathBuf>,
+
+ /// The path to a rustc binary for use with Cargo
+ #[clap(long, env = "RUSTC")]
+ pub rustc: Option<PathBuf>,
+
+ /// The config file with information about the Bazel and Cargo workspace
+ #[clap(long)]
+ pub config: PathBuf,
+
+ /// A generated manifest of splicing inputs
+ #[clap(long)]
+ pub splicing_manifest: PathBuf,
+
+ /// The path to either a Cargo or Bazel lockfile
+ #[clap(long)]
+ pub lockfile: PathBuf,
+
+ /// The type of lockfile
+ #[clap(long)]
+ pub lockfile_kind: LockfileKind,
+
+ /// The directory of the current repository rule
+ #[clap(long)]
+ pub repository_dir: PathBuf,
+
+ /// A [Cargo config](https://doc.rust-lang.org/cargo/reference/config.html#configuration)
+ /// file to use when gathering metadata
+ #[clap(long)]
+ pub cargo_config: Option<PathBuf>,
+
+ /// Whether or not to ignore the provided lockfile and re-generate one
+ #[clap(long)]
+ pub repin: bool,
+
+ /// The path to a Cargo metadata `json` file.
+ #[clap(long)]
+ pub metadata: Option<PathBuf>,
+
+ /// If true, outputs will be printed instead of written to disk.
+ #[clap(long)]
+ pub dry_run: bool,
+}
+
+pub fn generate(opt: GenerateOptions) -> Result<()> {
+ // Load the config
+ let config = Config::try_from_path(&opt.config)?;
+
+ // Determine if the dependencies need to be repinned.
+ let mut should_repin = opt.repin;
+
+ // Cargo lockfiles must always be repinned.
+ if is_cargo_lockfile(&opt.lockfile, &opt.lockfile_kind) {
+ should_repin = true;
+ }
+
+ // Go straight to rendering if there is no need to repin
+ if !should_repin {
+ let context = Context::try_from_path(opt.lockfile)?;
+
+ // Render build files
+ let outputs = Renderer::new(config.rendering).render(&context)?;
+
+ // Write the outputs to disk
+ write_outputs(outputs, &opt.repository_dir, opt.dry_run)?;
+
+ return Ok(());
+ }
+
+ // Ensure Cargo and Rustc are available for use during generation.
+ let cargo_bin = match &opt.cargo {
+ Some(bin) => bin,
+ None => bail!("The `--cargo` argument is required when generating unpinned content"),
+ };
+ let rustc_bin = match &opt.rustc {
+ Some(bin) => bin,
+ None => bail!("The `--rustc` argument is required when generating unpinned content"),
+ };
+
+ // Ensure a path to a metadata file was provided
+ let metadata_path = match &opt.metadata {
+ Some(path) => path,
+ None => bail!("The `--metadata` argument is required when generating unpinned content"),
+ };
+
+ // Load Metadata and Lockfile
+ let (cargo_metadata, cargo_lockfile) = load_metadata(
+ metadata_path,
+ if is_cargo_lockfile(&opt.lockfile, &opt.lockfile_kind) {
+ Some(&opt.lockfile)
+ } else {
+ None
+ },
+ )?;
+
+ // Copy the rendering config for later use
+ let render_config = config.rendering.clone();
+
+ // Annotate metadata
+ let annotations = Annotations::new(cargo_metadata, cargo_lockfile, config.clone())?;
+
+    // Generate renderable contexts for each package
+ let context = Context::new(annotations)?;
+
+ // Render build files
+ let outputs = Renderer::new(render_config).render(&context)?;
+
+ // Write outputs
+ write_outputs(outputs, &opt.repository_dir, opt.dry_run)?;
+
+    // Ensure Bazel lockfiles are written to disk so future generations can be short-circuited.
+ if matches!(opt.lockfile_kind, LockfileKind::Bazel) {
+ let splicing_manifest = SplicingManifest::try_from_path(&opt.splicing_manifest)?;
+
+ let lockfile = lock_context(context, &config, &splicing_manifest, cargo_bin, rustc_bin)?;
+
+ write_lockfile(lockfile, &opt.lockfile, opt.dry_run)?;
+ }
+
+ Ok(())
+}
diff --git a/crate_universe/src/cli/query.rs b/crate_universe/src/cli/query.rs
new file mode 100644
index 0000000..668f64f
--- /dev/null
+++ b/crate_universe/src/cli/query.rs
@@ -0,0 +1,87 @@
+//! The cli entrypoint for the `query` subcommand
+
+use std::fs;
+use std::path::PathBuf;
+
+use anyhow::Result;
+use clap::Parser;
+
+use crate::config::Config;
+use crate::context::Context;
+use crate::lockfile::Digest;
+use crate::splicing::SplicingManifest;
+
+/// Command line options for the `query` subcommand
+#[derive(Parser, Debug)]
+#[clap(about, version)]
+pub struct QueryOptions {
+ /// The lockfile path for reproducible Cargo->Bazel renderings
+ #[clap(long)]
+ pub lockfile: PathBuf,
+
+ /// The config file with information about the Bazel and Cargo workspace
+ #[clap(long)]
+ pub config: PathBuf,
+
+ /// A generated manifest of splicing inputs
+ #[clap(long)]
+ pub splicing_manifest: PathBuf,
+
+ /// The path to a Cargo binary to use for gathering metadata
+ #[clap(long, env = "CARGO")]
+ pub cargo: PathBuf,
+
+ /// The path to a rustc binary for use with Cargo
+ #[clap(long, env = "RUSTC")]
+ pub rustc: PathBuf,
+}
+
+/// Determine if the current lockfile needs to be re-pinned
+pub fn query(opt: QueryOptions) -> Result<()> {
+ // Read the lockfile
+ let content = match fs::read_to_string(&opt.lockfile) {
+ Ok(c) => c,
+ Err(_) => return announce_repin("Unable to read lockfile"),
+ };
+
+    // Deserialize it so we can easily compare it with the expected digest
+ let lockfile: Context = match serde_json::from_str(&content) {
+ Ok(ctx) => ctx,
+ Err(_) => return announce_repin("Could not load lockfile"),
+ };
+
+ // Check to see if a digest has been set
+ let digest = match &lockfile.checksum {
+ Some(d) => d.clone(),
+ None => return announce_repin("No digest provided in lockfile"),
+ };
+
+ // Load the config file
+ let config = Config::try_from_path(&opt.config)?;
+
+ let splicing_manifest = SplicingManifest::try_from_path(&opt.splicing_manifest)?;
+
+ // Generate a new digest so we can compare it with the one in the lockfile
+ let expected = Digest::new(
+ &lockfile,
+ &config,
+ &splicing_manifest,
+ &opt.cargo,
+ &opt.rustc,
+ )?;
+ if digest != expected {
+ return announce_repin(&format!(
+ "Digests do not match: {:?} != {:?}",
+ digest, expected
+ ));
+ }
+
+ // There is no need to repin
+ Ok(())
+}
+
+fn announce_repin(reason: &str) -> Result<()> {
+ eprintln!("{}", reason);
+ println!("repin");
+ Ok(())
+}
diff --git a/crate_universe/src/cli/splice.rs b/crate_universe/src/cli/splice.rs
new file mode 100644
index 0000000..cb8ba20
--- /dev/null
+++ b/crate_universe/src/cli/splice.rs
@@ -0,0 +1,90 @@
+//! The cli entrypoint for the `splice` subcommand
+
+use std::path::PathBuf;
+
+use clap::Parser;
+
+use crate::cli::Result;
+use crate::metadata::{write_metadata, Generator, MetadataGenerator};
+use crate::splicing::{
+ generate_lockfile, ExtraManifestsManifest, Splicer, SplicingManifest, WorkspaceMetadata,
+};
+
+/// Command line options for the `splice` subcommand
+#[derive(Parser, Debug)]
+#[clap(about, version)]
+pub struct SpliceOptions {
+ /// A generated manifest of splicing inputs
+ #[clap(long)]
+ pub splicing_manifest: PathBuf,
+
+ /// A generated manifest of "extra workspace members"
+ #[clap(long)]
+ pub extra_manifests_manifest: PathBuf,
+
+ /// A Cargo lockfile (Cargo.lock).
+ #[clap(long)]
+ pub cargo_lockfile: Option<PathBuf>,
+
+ /// The directory in which to build the workspace. A `Cargo.toml` file
+ /// should always be produced within this directory.
+ #[clap(long)]
+ pub workspace_dir: PathBuf,
+
+ /// If true, outputs will be printed instead of written to disk.
+ #[clap(long)]
+ pub dry_run: bool,
+
+ /// The path to a Cargo configuration file.
+ #[clap(long)]
+ pub cargo_config: Option<PathBuf>,
+
+ /// The path to a Cargo binary to use for gathering metadata
+ #[clap(long, env = "CARGO")]
+ pub cargo: PathBuf,
+
+ /// The path to a rustc binary for use with Cargo
+ #[clap(long, env = "RUSTC")]
+ pub rustc: PathBuf,
+}
+
+/// Combine a set of disjoint manifests into a single workspace.
+pub fn splice(opt: SpliceOptions) -> Result<()> {
+ // Load the all config files required for splicing a workspace
+ let splicing_manifest = SplicingManifest::try_from_path(&opt.splicing_manifest)?;
+ let extra_manifests_manifest =
+ ExtraManifestsManifest::try_from_path(opt.extra_manifests_manifest)?;
+
+ // Generate a splicer for creating a Cargo workspace manifest
+ let splicer = Splicer::new(
+ opt.workspace_dir,
+ splicing_manifest,
+ extra_manifests_manifest,
+ )?;
+
+ // Splice together the manifest
+ let manifest_path = splicer.splice_workspace()?;
+
+ // Generate a lockfile
+ let cargo_lockfile =
+ generate_lockfile(&manifest_path, &opt.cargo_lockfile, &opt.cargo, &opt.rustc)?;
+
+ // Write the registry url info to the manifest now that a lockfile has been generated
+ WorkspaceMetadata::write_registry_urls(&cargo_lockfile, &manifest_path)?;
+
+ // Write metadata to the workspace for future reuse
+ let (cargo_metadata, _) = Generator::new()
+ .with_cargo(opt.cargo)
+ .with_rustc(opt.rustc)
+ .generate(&manifest_path.as_path_buf())?;
+
+ // Write metadata next to the manifest
+ let metadata_path = manifest_path
+ .as_path_buf()
+ .parent()
+ .expect("Newly spliced cargo manifest has no parent directory")
+ .join("cargo-bazel-spliced-metadata.json");
+ write_metadata(&metadata_path, &cargo_metadata)?;
+
+ Ok(())
+}
diff --git a/crate_universe/src/cli/vendor.rs b/crate_universe/src/cli/vendor.rs
new file mode 100644
index 0000000..68e107f
--- /dev/null
+++ b/crate_universe/src/cli/vendor.rs
@@ -0,0 +1,167 @@
+//! The cli entrypoint for the `vendor` subcommand
+
+use std::collections::BTreeSet;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::{self, ExitStatus};
+
+use anyhow::{bail, Context as AnyhowContext, Result};
+use clap::Parser;
+
+use crate::config::{Config, VendorMode};
+use crate::context::Context;
+use crate::metadata::{Annotations, VendorGenerator};
+use crate::metadata::{Generator, MetadataGenerator};
+use crate::rendering::{render_module_label, write_outputs, Renderer};
+use crate::splicing::{
+ generate_lockfile, ExtraManifestsManifest, Splicer, SplicingManifest, WorkspaceMetadata,
+};
+
+/// Command line options for the `vendor` subcommand
+#[derive(Parser, Debug)]
+#[clap(about, version)]
+pub struct VendorOptions {
+ /// The path to a Cargo binary to use for gathering metadata
+ #[clap(long, env = "CARGO")]
+ pub cargo: PathBuf,
+
+ /// The path to a rustc binary for use with Cargo
+ #[clap(long, env = "RUSTC")]
+ pub rustc: PathBuf,
+
+ /// The path to a buildifier binary for formatting generated BUILD files
+ #[clap(long)]
+ pub buildifier: Option<PathBuf>,
+
+ /// The config file with information about the Bazel and Cargo workspace
+ #[clap(long)]
+ pub config: PathBuf,
+
+ /// A generated manifest of splicing inputs
+ #[clap(long)]
+ pub splicing_manifest: PathBuf,
+
+ /// The path to a Cargo lockfile
+ #[clap(long)]
+ pub cargo_lockfile: Option<PathBuf>,
+
+ /// A [Cargo config](https://doc.rust-lang.org/cargo/reference/config.html#configuration)
+ /// file to use when gathering metadata
+ #[clap(long)]
+ pub cargo_config: Option<PathBuf>,
+
+ /// The path to a Cargo metadata `json` file.
+ #[clap(long)]
+ pub metadata: Option<PathBuf>,
+
+ /// A generated manifest of "extra workspace members"
+ #[clap(long)]
+ pub extra_manifests_manifest: PathBuf,
+
+ /// The directory in which to build the workspace. A `Cargo.toml` file
+ /// should always be produced within this directory.
+ #[clap(long, env = "BUILD_WORKSPACE_DIRECTORY")]
+ pub workspace_dir: PathBuf,
+
+ /// If true, outputs will be printed instead of written to disk.
+ #[clap(long)]
+ pub dry_run: bool,
+}
+
+/// Run buildifier on a given file.
+fn buildifier_format(bin: &Path, file: &Path) -> Result<ExitStatus> {
+ let status = process::Command::new(bin)
+ .args(["-lint=fix", "-mode=fix", "-warnings=all"])
+ .arg(file)
+ .status()
+ .context("Failed to apply buildifier fixes")?;
+
+ if !status.success() {
+ bail!(status)
+ }
+
+ Ok(status)
+}
+
+pub fn vendor(opt: VendorOptions) -> Result<()> {
+ // Load the all config files required for splicing a workspace
+ let splicing_manifest =
+ SplicingManifest::try_from_path(&opt.splicing_manifest)?.absoulutize(&opt.workspace_dir);
+ let extra_manifests_manifest =
+ ExtraManifestsManifest::try_from_path(opt.extra_manifests_manifest)?.absoulutize();
+
+ let temp_dir = tempfile::tempdir().context("Failed to create temporary directory")?;
+
+ // Generate a splicer for creating a Cargo workspace manifest
+ let splicer = Splicer::new(
+ PathBuf::from(temp_dir.as_ref()),
+ splicing_manifest,
+ extra_manifests_manifest,
+ )
+ .context("Failed to crate splicer")?;
+
+ // Splice together the manifest
+ let manifest_path = splicer
+ .splice_workspace()
+ .context("Failed to splice workspace")?;
+
+ // Generate a lockfile
+ let cargo_lockfile =
+ generate_lockfile(&manifest_path, &opt.cargo_lockfile, &opt.cargo, &opt.rustc)?;
+
+ // Write the registry url info to the manifest now that a lockfile has been generated
+ WorkspaceMetadata::write_registry_urls(&cargo_lockfile, &manifest_path)?;
+
+ // Write metadata to the workspace for future reuse
+ let (cargo_metadata, cargo_lockfile) = Generator::new()
+ .with_cargo(opt.cargo.clone())
+ .with_rustc(opt.rustc.clone())
+ .generate(&manifest_path.as_path_buf())?;
+
+ // Load the config from disk
+ let config = Config::try_from_path(&opt.config)?;
+
+ // Annotate metadata
+ let annotations = Annotations::new(cargo_metadata, cargo_lockfile, config.clone())?;
+
+    // Generate renderable contexts for each package
+ let context = Context::new(annotations)?;
+
+ // Render build files
+ let outputs = Renderer::new(config.rendering.clone()).render(&context)?;
+
+ // Cache the file names for potential use with buildifier
+ let file_names: BTreeSet<PathBuf> = outputs.keys().cloned().collect();
+
+ // First ensure vendoring and rendering happen in a clean directory
+ let vendor_dir_label = render_module_label(&config.rendering.crates_module_template, "BUILD")?;
+ let vendor_dir = opt
+ .workspace_dir
+ .join(vendor_dir_label.package.unwrap_or_default());
+ if vendor_dir.exists() {
+ fs::remove_dir_all(&vendor_dir)
+ .with_context(|| format!("Failed to delete {}", vendor_dir.display()))?;
+ }
+
+ // Vendor the crates from the spliced workspace
+ if matches!(config.rendering.vendor_mode, Some(VendorMode::Local)) {
+ VendorGenerator::new(opt.cargo.clone(), opt.rustc.clone())
+ .generate(manifest_path.as_path_buf(), &vendor_dir)
+ .context("Failed to vendor dependencies")?;
+ }
+
+ // Write outputs
+ write_outputs(outputs, &opt.workspace_dir, opt.dry_run)
+ .context("Failed writing output files")?;
+
+ // Optionally apply buildifier fixes
+ if let Some(buildifier_bin) = opt.buildifier {
+ for file in file_names {
+ let file_path = opt.workspace_dir.join(file);
+ buildifier_format(&buildifier_bin, &file_path)
+ .with_context(|| format!("Failed to run buildifier on {}", file_path.display()))?;
+ }
+ }
+
+ Ok(())
+}
diff --git a/crate_universe/src/config.rs b/crate_universe/src/config.rs
new file mode 100644
index 0000000..66e3a7e
--- /dev/null
+++ b/crate_universe/src/config.rs
@@ -0,0 +1,495 @@
+//! A module for configuration information
+
+use std::collections::{BTreeMap, BTreeSet};
+use std::convert::AsRef;
+use std::iter::Sum;
+use std::ops::Add;
+use std::path::Path;
+use std::{fmt, fs};
+
+use anyhow::Result;
+use cargo_lock::package::source::GitReference;
+use cargo_metadata::Package;
+use semver::VersionReq;
+use serde::de::Visitor;
+use serde::{Deserialize, Serialize, Serializer};
+
+/// Representations of different kinds of crate vendoring into workspaces.
+#[derive(Debug, Serialize, Deserialize, Hash, Clone)]
+#[serde(rename_all = "lowercase")]
+pub enum VendorMode {
+ /// Crates having full source being vendored into a workspace
+ Local,
+
+ /// Crates having only BUILD files with repository rules vendored into a workspace
+ Remote,
+}
+
+impl std::fmt::Display for VendorMode {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ fmt::Display::fmt(
+ match self {
+ VendorMode::Local => "local",
+ VendorMode::Remote => "remote",
+ },
+ f,
+ )
+ }
+}
+
+#[derive(Debug, Default, Hash, Serialize, Deserialize, Clone)]
+#[serde(deny_unknown_fields)]
+pub struct RenderConfig {
+ /// The name of the repository being rendered
+ pub repository_name: String,
+
+ /// The pattern to use for BUILD file names.
+ /// Eg. `//:BUILD.{name}-{version}.bazel`
+ #[serde(default = "default_build_file_template")]
+ pub build_file_template: String,
+
+ /// The pattern to use for a crate target.
+ /// Eg. `@{repository}__{name}-{version}//:{target}`
+ #[serde(default = "default_crate_label_template")]
+ pub crate_label_template: String,
+
+ /// The pattern to use for the `defs.bzl` and `BUILD.bazel`
+ /// file names used for the crates module.
+ /// Eg. `//:{file}`
+ #[serde(default = "default_crates_module_template")]
+ pub crates_module_template: String,
+
+ /// The pattern used for a crate's repository name.
+ /// Eg. `{repository}__{name}-{version}`
+ #[serde(default = "default_crate_repository_template")]
+ pub crate_repository_template: String,
+
+ /// The default of the `package_name` parameter to use for the module macros like `all_crate_deps`.
+    /// In general, this should be unset to allow the macros to do auto-detection in the analysis phase.
+ pub default_package_name: Option<String>,
+
+ /// The pattern to use for platform constraints.
+ /// Eg. `@rules_rust//rust/platform:{triple}`.
+ #[serde(default = "default_platforms_template")]
+ pub platforms_template: String,
+
+    /// An optional configuration for rendering content to be rendered into repositories.
+ pub vendor_mode: Option<VendorMode>,
+}
+
+fn default_build_file_template() -> String {
+ "//:BUILD.{name}-{version}.bazel".to_owned()
+}
+
+fn default_crates_module_template() -> String {
+ "//:{file}".to_owned()
+}
+
+fn default_crate_label_template() -> String {
+ "@{repository}__{name}-{version}//:{target}".to_owned()
+}
+
+fn default_crate_repository_template() -> String {
+ "{repository}__{name}-{version}".to_owned()
+}
+
+fn default_platforms_template() -> String {
+ "@rules_rust//rust/platform:{triple}".to_owned()
+}
+
+/// A representation of some Git identifier used to represent the "revision" or "pin" of a checkout.
+#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq, PartialOrd, Ord)]
+pub enum Commitish {
+ /// From a tag.
+ Tag(String),
+
+ /// From the HEAD of a branch.
+ Branch(String),
+
+ /// From a specific revision.
+ Rev(String),
+}
+
+impl From<GitReference> for Commitish {
+ fn from(git_ref: GitReference) -> Self {
+ match git_ref {
+ GitReference::Tag(v) => Self::Tag(v),
+ GitReference::Branch(v) => Self::Branch(v),
+ GitReference::Rev(v) => Self::Rev(v),
+ }
+ }
+}
+
+/// Information representing deterministic identifiers for some remote asset.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
+pub enum Checksumish {
+ Http {
+ /// The sha256 digest of an http archive
+ sha256: Option<String>,
+ },
+ Git {
+ /// The revision of the git repository
+ commitsh: Commitish,
+
+ /// An optional date, not after the specified commit; the argument is
+ /// not allowed if a tag is specified (which allows cloning with depth
+ /// 1).
+ shallow_since: Option<String>,
+ },
+}
+
+#[derive(Debug, Default, Hash, Deserialize, Serialize, Clone)]
+pub struct CrateAnnotations {
+    /// Determines whether or not Cargo build scripts should be generated for the current package
+ pub gen_build_script: Option<bool>,
+
+ /// Additional data to pass to
+ /// [deps](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-deps) attribute.
+ pub deps: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to
+ /// [proc_macro_deps](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-proc_macro_deps) attribute.
+ pub proc_macro_deps: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to the target's
+ /// [crate_features](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-crate_features) attribute.
+ pub crate_features: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to the target's
+ /// [data](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-data) attribute.
+ pub data: Option<BTreeSet<String>>,
+
+ /// An optional glob pattern to set on the
+ /// [data](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-data) attribute.
+ pub data_glob: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to
+ /// [compile_data](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-compile_data) attribute.
+ pub compile_data: Option<BTreeSet<String>>,
+
+ /// An optional glob pattern to set on the
+ /// [compile_data](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-compile_data) attribute.
+ pub compile_data_glob: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to the target's
+ /// [rustc_env](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-rustc_env) attribute.
+ pub rustc_env: Option<BTreeMap<String, String>>,
+
+ /// Additional data to pass to the target's
+ /// [rustc_env_files](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-rustc_env_files) attribute.
+ pub rustc_env_files: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to the target's
+ /// [rustc_flags](https://bazelbuild.github.io/rules_rust/defs.html#rust_library-rustc_flags) attribute.
+ pub rustc_flags: Option<Vec<String>>,
+
+ /// Additional dependencies to pass to a build script's
+ /// [deps](https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script-deps) attribute.
+ pub build_script_deps: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to a build script's
+ /// [proc_macro_deps](https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script-proc_macro_deps) attribute.
+ pub build_script_proc_macro_deps: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to a build script's
+ /// [build_script_data](https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script-data) attribute.
+ pub build_script_data: Option<BTreeSet<String>>,
+
+ /// Additional data to pass to a build script's
+ /// [tools](https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script-tools) attribute.
+ pub build_script_tools: Option<BTreeSet<String>>,
+
+ /// An optional glob pattern to set on the
+ /// [build_script_data](https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script-build_script_env) attribute.
+ pub build_script_data_glob: Option<BTreeSet<String>>,
+
+ /// Additional environment variables to pass to a build script's
+ /// [build_script_env](https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script-rustc_env) attribute.
+ pub build_script_env: Option<BTreeMap<String, String>>,
+
+ /// Additional rustc_env flags to pass to a build script's
+ /// [rustc_env](https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script-rustc_env) attribute.
+ pub build_script_rustc_env: Option<BTreeMap<String, String>>,
+
+ /// A scratch pad used to write arbitrary text to target BUILD files.
+ pub additive_build_file_content: Option<String>,
+
+    /// For git sourced crates, this is the
+ /// [git_repository::shallow_since](https://docs.bazel.build/versions/main/repo/git.html#new_git_repository-shallow_since) attribute.
+ pub shallow_since: Option<String>,
+
+ /// The `patch_args` attribute of a Bazel repository rule. See
+ /// [http_archive.patch_args](https://docs.bazel.build/versions/main/repo/http.html#http_archive-patch_args)
+ pub patch_args: Option<Vec<String>>,
+
+ /// The `patch_tool` attribute of a Bazel repository rule. See
+ /// [http_archive.patch_tool](https://docs.bazel.build/versions/main/repo/http.html#http_archive-patch_tool)
+ pub patch_tool: Option<String>,
+
+ /// The `patches` attribute of a Bazel repository rule. See
+ /// [http_archive.patches](https://docs.bazel.build/versions/main/repo/http.html#http_archive-patches)
+ pub patches: Option<BTreeSet<String>>,
+}
+
+macro_rules! joined_extra_member {
+ ($lhs:expr, $rhs:expr, $fn_new:expr, $fn_extend:expr) => {
+ if let Some(lhs) = $lhs {
+ if let Some(rhs) = $rhs {
+ let mut new = $fn_new();
+ $fn_extend(&mut new, lhs);
+ $fn_extend(&mut new, rhs);
+ Some(new)
+ } else {
+ Some(lhs)
+ }
+ } else if $rhs.is_some() {
+ $rhs
+ } else {
+ None
+ }
+ };
+}
+
+impl Add for CrateAnnotations {
+ type Output = CrateAnnotations;
+
+ fn add(self, rhs: Self) -> Self::Output {
+ let shallow_since = if self.shallow_since.is_some() {
+ self.shallow_since
+ } else if rhs.shallow_since.is_some() {
+ rhs.shallow_since
+ } else {
+ None
+ };
+
+ let patch_tool = if self.patch_tool.is_some() {
+ self.patch_tool
+ } else if rhs.patch_tool.is_some() {
+ rhs.patch_tool
+ } else {
+ None
+ };
+
+ let gen_build_script = if self.gen_build_script.is_some() {
+ self.gen_build_script
+ } else if rhs.gen_build_script.is_some() {
+ rhs.gen_build_script
+ } else {
+ None
+ };
+
+ let concat_string = |lhs: &mut String, rhs: String| {
+ *lhs = format!("{}{}", lhs, rhs);
+ };
+
+ #[rustfmt::skip]
+ let output = CrateAnnotations {
+ gen_build_script,
+ deps: joined_extra_member!(self.deps, rhs.deps, BTreeSet::new, BTreeSet::extend),
+ proc_macro_deps: joined_extra_member!(self.proc_macro_deps, rhs.proc_macro_deps, BTreeSet::new, BTreeSet::extend),
+ crate_features: joined_extra_member!(self.crate_features, rhs.crate_features, BTreeSet::new, BTreeSet::extend),
+ data: joined_extra_member!(self.data, rhs.data, BTreeSet::new, BTreeSet::extend),
+ data_glob: joined_extra_member!(self.data_glob, rhs.data_glob, BTreeSet::new, BTreeSet::extend),
+ compile_data: joined_extra_member!(self.compile_data, rhs.compile_data, BTreeSet::new, BTreeSet::extend),
+ compile_data_glob: joined_extra_member!(self.compile_data_glob, rhs.compile_data_glob, BTreeSet::new, BTreeSet::extend),
+ rustc_env: joined_extra_member!(self.rustc_env, rhs.rustc_env, BTreeMap::new, BTreeMap::extend),
+ rustc_env_files: joined_extra_member!(self.rustc_env_files, rhs.rustc_env_files, BTreeSet::new, BTreeSet::extend),
+ rustc_flags: joined_extra_member!(self.rustc_flags, rhs.rustc_flags, Vec::new, Vec::extend),
+ build_script_deps: joined_extra_member!(self.build_script_deps, rhs.build_script_deps, BTreeSet::new, BTreeSet::extend),
+ build_script_proc_macro_deps: joined_extra_member!(self.build_script_proc_macro_deps, rhs.build_script_proc_macro_deps, BTreeSet::new, BTreeSet::extend),
+ build_script_data: joined_extra_member!(self.build_script_data, rhs.build_script_data, BTreeSet::new, BTreeSet::extend),
+ build_script_tools: joined_extra_member!(self.build_script_tools, rhs.build_script_tools, BTreeSet::new, BTreeSet::extend),
+ build_script_data_glob: joined_extra_member!(self.build_script_data_glob, rhs.build_script_data_glob, BTreeSet::new, BTreeSet::extend),
+ build_script_env: joined_extra_member!(self.build_script_env, rhs.build_script_env, BTreeMap::new, BTreeMap::extend),
+ build_script_rustc_env: joined_extra_member!(self.build_script_rustc_env, rhs.build_script_rustc_env, BTreeMap::new, BTreeMap::extend),
+ additive_build_file_content: joined_extra_member!(self.additive_build_file_content, rhs.additive_build_file_content, String::new, concat_string),
+ shallow_since,
+ patch_args: joined_extra_member!(self.patch_args, rhs.patch_args, Vec::new, Vec::extend),
+ patch_tool,
+ patches: joined_extra_member!(self.patches, rhs.patches, BTreeSet::new, BTreeSet::extend),
+ };
+
+ output
+ }
+}
+
+impl Sum for CrateAnnotations {
+ fn sum<I: Iterator<Item = Self>>(iter: I) -> Self {
+ iter.fold(CrateAnnotations::default(), |a, b| a + b)
+ }
+}
+
+/// A unique identifier for Crates
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Hash, Clone)]
+pub struct CrateId {
+ /// The name of the crate
+ pub name: String,
+
+ /// The crate's semantic version
+ pub version: String,
+}
+
+impl CrateId {
+ /// Construct a new [CrateId]
+ pub fn new(name: String, version: String) -> Self {
+ Self { name, version }
+ }
+
+ /// Compares a [CrateId] against a [cargo_metadata::Package].
+ pub fn matches(&self, package: &Package) -> bool {
+ // If the package name does not match, it's obviously
+ // not the right package
+ if self.name != "*" && self.name != package.name {
+ return false;
+ }
+
+ // First see if the package version matches exactly
+ if package.version.to_string() == self.version {
+ return true;
+ }
+
+ // Next, check to see if the version provided is a semver req and
+ // check if the package matches the condition
+ if let Ok(semver) = VersionReq::parse(&self.version) {
+ if semver.matches(&package.version) {
+ return true;
+ }
+ }
+
+ false
+ }
+}
+
+impl From<&Package> for CrateId {
+ fn from(package: &Package) -> Self {
+ Self {
+ name: package.name.clone(),
+ version: package.version.to_string(),
+ }
+ }
+}
+
+impl Serialize for CrateId {
+ fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+ where
+ S: Serializer,
+ {
+ serializer.serialize_str(&format!("{} {}", self.name, self.version))
+ }
+}
+
+struct CrateIdVisitor;
+impl<'de> Visitor<'de> for CrateIdVisitor {
+ type Value = CrateId;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Expected string value of `{name} {version}`.")
+ }
+
+ fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ v.rsplit_once(' ')
+ .map(|(name, version)| CrateId {
+ name: name.to_string(),
+ version: version.to_string(),
+ })
+ .ok_or_else(|| {
+ E::custom(format!(
+ "Expected string value of `{{name}} {{version}}`. Got '{}'",
+ v
+ ))
+ })
+ }
+}
+
+impl<'de> Deserialize<'de> for CrateId {
+ fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+ where
+ D: serde::Deserializer<'de>,
+ {
+ deserializer.deserialize_str(CrateIdVisitor)
+ }
+}
+
+impl std::fmt::Display for CrateId {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ fmt::Display::fmt(&format!("{} {}", self.name, self.version), f)
+ }
+}
+
+/// Workspace specific settings to control how targets are generated
+#[derive(Debug, Default, Serialize, Deserialize, Clone)]
+#[serde(deny_unknown_fields)]
+pub struct Config {
+ /// Whether or not to generate Cargo build scripts by default
+ pub generate_build_scripts: bool,
+
+ /// Additional settings to apply to generated crates
+ #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
+ pub annotations: BTreeMap<CrateId, CrateAnnotations>,
+
+ /// Settings used to determine various render info
+ pub rendering: RenderConfig,
+
+ /// The contents of a Cargo configuration file
+ pub cargo_config: Option<toml::Value>,
+
+ /// A set of platform triples to use in generated select statements
+ #[serde(default, skip_serializing_if = "BTreeSet::is_empty")]
+ pub supported_platform_triples: BTreeSet<String>,
+}
+
+impl Config {
+ pub fn try_from_path<T: AsRef<Path>>(path: T) -> Result<Self> {
+ let data = fs::read_to_string(path)?;
+ Ok(serde_json::from_str(&data)?)
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ use crate::test::*;
+
+ #[test]
+ fn test_crate_id_serde() {
+ let id: CrateId = serde_json::from_str("\"crate 0.1.0\"").unwrap();
+ assert_eq!(id, CrateId::new("crate".to_owned(), "0.1.0".to_owned()));
+ assert_eq!(serde_json::to_string(&id).unwrap(), "\"crate 0.1.0\"");
+ }
+
+ #[test]
+ fn test_crate_id_serde_semver() {
+ let semver_id: CrateId = serde_json::from_str("\"crate *\"").unwrap();
+ assert_eq!(semver_id, CrateId::new("crate".to_owned(), "*".to_owned()));
+ assert_eq!(serde_json::to_string(&semver_id).unwrap(), "\"crate *\"");
+ }
+
+ #[test]
+ fn test_crate_id_matches() {
+ let mut package = mock_cargo_metadata_package();
+ let id = CrateId::new("mock-pkg".to_owned(), "0.1.0".to_owned());
+
+ package.version = cargo_metadata::Version::new(0, 1, 0);
+ assert!(id.matches(&package));
+
+ package.version = cargo_metadata::Version::new(1, 0, 0);
+ assert!(!id.matches(&package));
+ }
+
+ #[test]
+ fn test_crate_id_semver_matches() {
+ let mut package = mock_cargo_metadata_package();
+ package.version = cargo_metadata::Version::new(1, 0, 0);
+ let mut id = CrateId::new("mock-pkg".to_owned(), "0.1.0".to_owned());
+
+ id.version = "*".to_owned();
+ assert!(id.matches(&package));
+
+ id.version = "<1".to_owned();
+ assert!(!id.matches(&package));
+ }
+}
diff --git a/crate_universe/src/context.rs b/crate_universe/src/context.rs
new file mode 100644
index 0000000..912cd80
--- /dev/null
+++ b/crate_universe/src/context.rs
@@ -0,0 +1,511 @@
+//! Convert annotated metadata into a renderable context
+
+pub mod crate_context;
+mod platforms;
+
+use std::collections::{BTreeMap, BTreeSet};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use anyhow::Result;
+use serde::{Deserialize, Serialize};
+
+use crate::config::CrateId;
+use crate::context::crate_context::{CrateContext, CrateDependency, Rule};
+use crate::context::platforms::resolve_cfg_platforms;
+use crate::lockfile::Digest;
+use crate::metadata::Annotations;
+use crate::utils::starlark::{Select, SelectList};
+
+pub use self::crate_context::*;
+
+/// A struct containing information about a Cargo dependency graph in an easy-to-consume
+/// format for rendering reproducible Bazel targets.
+#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
+pub struct Context {
+ /// The collective checksum of all inputs to the context
+ pub checksum: Option<Digest>,
+
+ /// The collection of all crates that make up the dependency graph
+ pub crates: BTreeMap<CrateId, CrateContext>,
+
+ /// A subset of only crates with binary targets
+ pub binary_crates: BTreeSet<CrateId>,
+
+ /// A subset of workspace members mapping to their workspace
+ /// path relative to the workspace root
+ pub workspace_members: BTreeMap<CrateId, String>,
+
+ /// A mapping of `cfg` flags to platform triples supporting the configuration
+ pub conditions: BTreeMap<String, BTreeSet<String>>,
+}
+
+impl Context {
+ /// Deserialize a [Context] from the JSON file at the given path.
+ pub fn try_from_path<T: AsRef<Path>>(path: T) -> Result<Self> {
+ let data = fs::read_to_string(path.as_ref())?;
+ Ok(serde_json::from_str(&data)?)
+ }
+
+ /// Build a [Context] from annotated Cargo metadata, resolving crate
+ /// contexts, binary crates, platform conditions and workspace members.
+ pub fn new(annotations: Annotations) -> Result<Self> {
+ // Build a map of crate contexts
+ let crates: BTreeMap<CrateId, CrateContext> = annotations
+ .metadata
+ .crates
+ .iter()
+ // Convert the crate annotations into more renderable contexts
+ .map(|(_, annotation)| {
+ let context = CrateContext::new(
+ annotation,
+ &annotations.metadata.packages,
+ &annotations.lockfile.crates,
+ &annotations.pairred_extras,
+ annotations.config.generate_build_scripts,
+ );
+ let id = CrateId::new(context.name.clone(), context.version.clone());
+ (id, context)
+ })
+ .collect();
+
+ // Filter for any crate that contains a binary
+ let binary_crates: BTreeSet<CrateId> = crates
+ .iter()
+ .filter(|(_, ctx)| ctx.targets.iter().any(|t| matches!(t, Rule::Binary(..))))
+ // Only consider remote repositories (so non-workspace members).
+ .filter(|(_, ctx)| ctx.repository.is_some())
+ .map(|(id, _)| id.clone())
+ .collect();
+
+ // Given a list of all conditional dependencies, build a set of platform
+ // triples which satisfy the conditions.
+ let conditions = resolve_cfg_platforms(
+ crates.values().collect(),
+ &annotations.config.supported_platform_triples,
+ )?;
+
+ // Generate a list of all workspace members
+ let workspace_members = annotations
+ .metadata
+ .workspace_members
+ .iter()
+ .filter_map(|id| {
+ let pkg = &annotations.metadata.packages[id];
+ let package_path_id = match Self::get_package_path_id(
+ pkg,
+ &annotations.metadata.workspace_root,
+ &annotations.metadata.workspace_metadata.workspace_prefix,
+ &annotations.metadata.workspace_metadata.package_prefixes,
+ ) {
+ Ok(id) => id,
+ Err(e) => return Some(Err(e)),
+ };
+ let crate_id = CrateId::new(pkg.name.clone(), pkg.version.to_string());
+
+ // Crates that have repository information are not considered workspace members.
+ // The assumption is that they are "extra workspace members".
+ match crates[&crate_id].repository {
+ Some(_) => None,
+ None => Some(Ok((crate_id, package_path_id))),
+ }
+ })
+ .collect::<Result<BTreeMap<CrateId, String>>>()?;
+
+ Ok(Self {
+ checksum: None,
+ crates,
+ binary_crates,
+ workspace_members,
+ conditions,
+ })
+ }
+
+ // A helper function for locating the unique path in a workspace to a workspace member
+ fn get_package_path_id(
+ package: &cargo_metadata::Package,
+ workspace_root: &Path,
+ workspace_prefix: &Option<String>,
+ package_prefixes: &BTreeMap<String, String>,
+ ) -> Result<String> {
+ // Locate the package's manifest directory
+ let manifest_dir = package
+ .manifest_path
+ .parent()
+ .expect("Every manifest should have a parent")
+ .as_std_path();
+
+ // Compare it with the root of the workspace
+ let package_path_diff = pathdiff::diff_paths(manifest_dir, workspace_root)
+ .expect("Every workspace member's manifest is a child of the workspace root");
+
+ // Ensure the package paths are adjusted in the macros according to the splicing results
+ let package_path = match package_prefixes.get(&package.name) {
+ // Any package prefix should be absolute and therefore always applied
+ Some(prefix) => PathBuf::from(prefix).join(package_path_diff),
+ // If no package prefix is present, attempt to apply the workspace prefix
+ // since workspace members would not have shown up with their own label
+ None => match workspace_prefix {
+ Some(prefix) => PathBuf::from(prefix).join(package_path_diff),
+ None => package_path_diff,
+ },
+ };
+
+ // Sanitize the path for increased consistency
+ // (forward slashes, no leading/trailing separator — stable across platforms)
+ let package_path_id = package_path
+ .display()
+ .to_string()
+ .replace('\\', "/")
+ .trim_matches('/')
+ .to_owned();
+
+ Ok(package_path_id)
+ }
+
+ /// Filter a crate's dependencies to only ones with aliases
+ pub fn crate_aliases(
+ &self,
+ crate_id: &CrateId,
+ build: bool,
+ include_dev: bool,
+ ) -> SelectList<&CrateDependency> {
+ let ctx = &self.crates[crate_id];
+ let mut set = SelectList::default();
+
+ // Return a set of aliases for build dependencies
+ // vs normal dependencies when requested.
+ if build {
+ // Note that there may not be build dependencies so no dependencies
+ // will be gathered in this case
+ if let Some(attrs) = &ctx.build_script_attrs {
+ let collection: Vec<(Option<String>, &CrateDependency)> = attrs
+ .deps
+ .configurations()
+ .into_iter()
+ .flat_map(move |conf| {
+ attrs
+ .deps
+ .get_iter(conf)
+ .expect("Iterating over known keys should never panic")
+ .filter(|dep| dep.alias.is_some())
+ .map(move |dep| (conf.cloned(), dep))
+ })
+ .chain(attrs.proc_macro_deps.configurations().into_iter().flat_map(
+ move |conf| {
+ attrs
+ .proc_macro_deps
+ .get_iter(conf)
+ .expect("Iterating over known keys should never panic")
+ .filter(|dep| dep.alias.is_some())
+ .map(move |dep| (conf.cloned(), dep))
+ },
+ ))
+ .collect();
+
+ for (config, dep) in collection {
+ set.insert(dep, config);
+ }
+ }
+ } else {
+ let attrs = &ctx.common_attrs;
+ let mut collection: Vec<(Option<String>, &CrateDependency)> =
+ attrs
+ .deps
+ .configurations()
+ .into_iter()
+ .flat_map(move |conf| {
+ attrs
+ .deps
+ .get_iter(conf)
+ .expect("Iterating over known keys should never panic")
+ .filter(|dep| dep.alias.is_some())
+ .map(move |dep| (conf.cloned(), dep))
+ })
+ .chain(attrs.proc_macro_deps.configurations().into_iter().flat_map(
+ move |conf| {
+ attrs
+ .proc_macro_deps
+ .get_iter(conf)
+ .expect("Iterating over known keys should never panic")
+ .filter(|dep| dep.alias.is_some())
+ .map(move |dep| (conf.cloned(), dep))
+ },
+ ))
+ .collect();
+
+ // Optionally include dev dependencies
+ if include_dev {
+ collection = collection
+ .into_iter()
+ .chain(
+ attrs
+ .deps_dev
+ .configurations()
+ .into_iter()
+ .flat_map(move |conf| {
+ attrs
+ .deps_dev
+ .get_iter(conf)
+ .expect("Iterating over known keys should never panic")
+ .filter(|dep| dep.alias.is_some())
+ .map(move |dep| (conf.cloned(), dep))
+ }),
+ )
+ .chain(
+ attrs
+ .proc_macro_deps_dev
+ .configurations()
+ .into_iter()
+ .flat_map(move |conf| {
+ attrs
+ .proc_macro_deps_dev
+ .get_iter(conf)
+ .expect("Iterating over known keys should never panic")
+ .filter(|dep| dep.alias.is_some())
+ .map(move |dep| (conf.cloned(), dep))
+ }),
+ )
+ .collect();
+ }
+
+ for (config, dep) in collection {
+ set.insert(dep, config);
+ }
+ }
+
+ set
+ }
+
+ /// Create a set of all direct dependencies of workspace member crates and map them to
+ /// optional alternative names that allow them to be uniquely identified. This typically
+ /// results in a mapping of ([CrateId], [None]) where [None] defaults to using the crate
+ /// name. The next most common would be using ([CrateId], `Some(alias)`) as some projects
+ /// may use aliases in Cargo as a way to differentiate different versions of the same dep.
+ pub fn flat_workspace_member_deps(&self) -> BTreeMap<CrateId, Option<String>> {
+ let workspace_member_dependencies: BTreeSet<CrateDependency> = self
+ .workspace_members
+ .iter()
+ .map(|(id, _)| &self.crates[id])
+ .flat_map(|ctx| {
+ // Build an iterator of all dependency CrateIds.
+ // TODO: This expansion is horribly verbose and should be refactored but closures
+ // were not playing nice when I tried it.
+ ctx.common_attrs
+ .deps
+ .configurations()
+ .into_iter()
+ .flat_map(move |conf| {
+ ctx.common_attrs
+ .deps
+ .get_iter(conf)
+ .expect("Lookup should be guaranteed")
+ })
+ .chain(
+ ctx.common_attrs
+ .deps_dev
+ .configurations()
+ .into_iter()
+ .flat_map(move |conf| {
+ ctx.common_attrs
+ .deps_dev
+ .get_iter(conf)
+ .expect("Lookup should be guaranteed")
+ }),
+ )
+ .chain(
+ ctx.common_attrs
+ .proc_macro_deps
+ .configurations()
+ .into_iter()
+ .flat_map(move |conf| {
+ ctx.common_attrs
+ .proc_macro_deps
+ .get_iter(conf)
+ .expect("Lookup should be guaranteed")
+ }),
+ )
+ .chain(
+ ctx.common_attrs
+ .proc_macro_deps_dev
+ .configurations()
+ .into_iter()
+ .flat_map(move |conf| {
+ ctx.common_attrs
+ .proc_macro_deps_dev
+ .get_iter(conf)
+ .expect("Lookup should be guaranteed")
+ }),
+ )
+ })
+ .cloned()
+ .collect();
+
+ // Search for any duplicate workspace member definitions
+ let duplicate_deps: Vec<CrateDependency> = workspace_member_dependencies
+ .iter()
+ .filter(|dep| {
+ workspace_member_dependencies
+ .iter()
+ .filter(|check| dep.id.name == check.id.name)
+ .count()
+ > 1
+ })
+ .cloned()
+ .collect();
+
+ workspace_member_dependencies
+ .into_iter()
+ .map(|dep| {
+ if duplicate_deps.contains(&dep) {
+ if let Some(alias) = &dep.alias {
+ // Check for any duplicate aliases
+ let aliases = duplicate_deps
+ .iter()
+ .filter(|dupe| dupe.id.name == dep.id.name)
+ .filter(|dupe| dupe.alias.is_some())
+ .filter(|dupe| dupe.alias == dep.alias);
+
+ // If there are multiple aliased crates with the same name, the name is updated to
+ // be `{alias}-{version}` to differentiate them.
+ if aliases.count() >= 2 {
+ let rename = format!("{}-{}", &alias, &dep.id.version);
+ (dep.id, Some(rename))
+ } else {
+ (dep.id, Some(alias.clone()))
+ }
+ } else {
+ // Check for all duplicates that match the current dependency and have no alias
+ let unaliased = duplicate_deps
+ .iter()
+ .filter(|dupe| dupe.id.name == dep.id.name)
+ .filter(|dupe| dupe.alias.is_none());
+
+ // If there are multiple unaliased crates with the same name, the name is updated to
+ // be `{name}-{version}` to differentiate them.
+ if unaliased.count() >= 2 {
+ let rename = format!("{}-{}", &dep.id.name, &dep.id.version);
+ (dep.id, Some(rename))
+ } else {
+ (dep.id, None)
+ }
+ }
+ } else {
+ (dep.id, dep.alias)
+ }
+ })
+ .collect()
+ }
+
+ /// Produce a list of binary dependencies with optional aliases which prevent duplicate
+ /// targets from being generated.
+ pub fn flat_binary_deps(&self) -> BTreeMap<CrateId, Option<String>> {
+ // Check for any duplicate binary crate names. If one exists provide an alias to differentiate them
+ self.binary_crates
+ .iter()
+ .map(|crate_id| {
+ let dupe_count = self
+ .binary_crates
+ .iter()
+ .filter(|id| crate_id.name == id.name)
+ .count();
+ // For targets that appear twice (which can happen if one crate aliases a binary dependency)
+ if dupe_count >= 2 {
+ let rename = format!("{}-{}", crate_id.name, crate_id.version);
+ (crate_id.clone(), Some(rename))
+ } else {
+ (crate_id.clone(), None)
+ }
+ })
+ .collect()
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ use crate::config::Config;
+
+ // Build a [Context] from the "common" test fixture workspace.
+ fn mock_context_common() -> Context {
+ let annotations = Annotations::new(
+ crate::test::metadata::common(),
+ crate::test::lockfile::common(),
+ Config::default(),
+ )
+ .unwrap();
+
+ Context::new(annotations).unwrap()
+ }
+
+ // Build a [Context] from the "alias" test fixture workspace, which
+ // contains renamed (aliased) dependencies.
+ fn mock_context_aliases() -> Context {
+ let annotations = Annotations::new(
+ crate::test::metadata::alias(),
+ crate::test::lockfile::alias(),
+ Config::default(),
+ )
+ .unwrap();
+
+ Context::new(annotations).unwrap()
+ }
+
+ // Without aliases, all direct workspace deps map to `None` (use the crate name).
+ #[test]
+ fn flat_workspace_member_deps() {
+ let context = mock_context_common();
+ let workspace_member_deps = context.flat_workspace_member_deps();
+
+ assert_eq!(
+ workspace_member_deps,
+ BTreeMap::from([
+ (
+ CrateId::new("bitflags".to_owned(), "1.3.2".to_owned()),
+ None
+ ),
+ (CrateId::new("cfg-if".to_owned(), "1.0.0".to_owned()), None),
+ ])
+ );
+ }
+
+ // Aliased duplicates of the same crate name keep their alias while the
+ // unaliased copy maps to `None`.
+ #[test]
+ fn flat_workspace_member_deps_with_alises() {
+ let context = mock_context_aliases();
+ let workspace_member_deps = context.flat_workspace_member_deps();
+
+ assert_eq!(
+ workspace_member_deps,
+ BTreeMap::from([
+ (
+ CrateId {
+ name: "log".to_owned(),
+ version: "0.3.9".to_owned(),
+ },
+ Some("pinned_log".to_owned())
+ ),
+ (
+ CrateId {
+ name: "log".to_owned(),
+ version: "0.4.14".to_owned(),
+ },
+ None
+ ),
+ (
+ CrateId {
+ name: "names".to_owned(),
+ version: "0.12.1-dev".to_owned(),
+ },
+ Some("pinned_names".to_owned())
+ ),
+ (
+ CrateId {
+ name: "names".to_owned(),
+ version: "0.13.0".to_owned(),
+ },
+ None
+ ),
+ (
+ CrateId {
+ name: "value-bag".to_owned(),
+ version: "1.0.0-alpha.7".to_owned(),
+ },
+ None
+ ),
+ ])
+ );
+ }
+}
diff --git a/crate_universe/src/context/crate_context.rs b/crate_universe/src/context/crate_context.rs
new file mode 100644
index 0000000..0278ebe
--- /dev/null
+++ b/crate_universe/src/context/crate_context.rs
@@ -0,0 +1,838 @@
+//! Crate specific information embedded into [crate::context::Context] objects.
+
+use std::collections::{BTreeMap, BTreeSet};
+
+use cargo_metadata::{Node, Package, PackageId};
+use serde::{Deserialize, Serialize};
+
+use crate::config::CrateId;
+use crate::metadata::{CrateAnnotation, Dependency, PairredExtras, SourceAnnotation};
+use crate::utils::sanitize_module_name;
+use crate::utils::starlark::{Glob, SelectList, SelectMap, SelectStringDict, SelectStringList};
+
+/// A dependency edge between two crates in the rendered dependency graph.
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
+pub struct CrateDependency {
+ /// The [CrateId] of the dependency
+ pub id: CrateId,
+
+ /// The target name of the dependency. Note this may differ from the
+ /// dependency's package name in cases such as build scripts.
+ pub target: String,
+
+ /// Some dependencies are assigned aliases. This is tracked here
+ /// and omitted from serialized output when unset.
+ #[serde(default, skip_serializing_if = "Option::is_none")]
+ pub alias: Option<String>,
+}
+
+/// Attributes shared by all rendered target kinds (see [Rule]).
+#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
+#[serde(default)]
+pub struct TargetAttributes {
+ /// The module name of the crate (notably, not the package name).
+ pub crate_name: String,
+
+ /// The path to the crate's root source file, relative to the manifest.
+ pub crate_root: Option<String>,
+
+ /// A glob pattern of all source files required by the target
+ pub srcs: Glob,
+}
+
+/// The Bazel rule to render for a particular Cargo target kind.
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
+pub enum Rule {
+ /// `cargo_build_script`
+ BuildScript(TargetAttributes),
+
+ /// `rust_proc_macro`
+ ProcMacro(TargetAttributes),
+
+ /// `rust_library`
+ Library(TargetAttributes),
+
+ /// `rust_binary`
+ Binary(TargetAttributes),
+}
+
+/// A set of attributes common to most `rust_library`, `rust_proc_macro`, and other
+/// [core rules of `rules_rust`](https://bazelbuild.github.io/rules_rust/defs.html).
+///
+/// Empty fields are skipped during serialization to keep rendered output minimal.
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
+#[serde(default)]
+pub struct CommonAttributes {
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub compile_data: SelectStringList,
+
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub compile_data_glob: BTreeSet<String>,
+
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub crate_features: BTreeSet<String>,
+
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub data: SelectStringList,
+
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub data_glob: BTreeSet<String>,
+
+ #[serde(skip_serializing_if = "SelectList::should_skip_serializing")]
+ pub deps: SelectList<CrateDependency>,
+
+ // `extra_*` fields hold user-supplied additions from crate annotations.
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub extra_deps: BTreeSet<String>,
+
+ #[serde(skip_serializing_if = "SelectList::should_skip_serializing")]
+ pub deps_dev: SelectList<CrateDependency>,
+
+ pub edition: String,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub linker_script: Option<String>,
+
+ #[serde(skip_serializing_if = "SelectList::should_skip_serializing")]
+ pub proc_macro_deps: SelectList<CrateDependency>,
+
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub extra_proc_macro_deps: BTreeSet<String>,
+
+ #[serde(skip_serializing_if = "SelectList::should_skip_serializing")]
+ pub proc_macro_deps_dev: SelectList<CrateDependency>,
+
+ #[serde(skip_serializing_if = "SelectStringDict::should_skip_serializing")]
+ pub rustc_env: SelectStringDict,
+
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub rustc_env_files: SelectStringList,
+
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub rustc_flags: SelectStringList,
+
+ pub version: String,
+
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ pub tags: Vec<String>,
+}
+
+impl Default for CommonAttributes {
+ // A hand-written Default is required (over `#[derive(Default)]`) because
+ // `compile_data_glob` defaults to a non-empty glob.
+ fn default() -> Self {
+ Self {
+ compile_data: Default::default(),
+ // Generated targets include all files in their package by default
+ compile_data_glob: BTreeSet::from(["**".to_owned()]),
+ crate_features: Default::default(),
+ data: Default::default(),
+ data_glob: Default::default(),
+ deps: Default::default(),
+ extra_deps: Default::default(),
+ deps_dev: Default::default(),
+ edition: Default::default(),
+ linker_script: Default::default(),
+ proc_macro_deps: Default::default(),
+ extra_proc_macro_deps: Default::default(),
+ proc_macro_deps_dev: Default::default(),
+ rustc_env: Default::default(),
+ rustc_env_files: Default::default(),
+ rustc_flags: Default::default(),
+ version: Default::default(),
+ tags: Default::default(),
+ }
+ }
+}
+
+// Build script attributes. See
+// https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script
+//
+// Empty fields are skipped during serialization to keep rendered output minimal.
+#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
+#[serde(default)]
+pub struct BuildScriptAttributes {
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub compile_data: SelectStringList,
+
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub data: SelectStringList,
+
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub data_glob: BTreeSet<String>,
+
+ #[serde(skip_serializing_if = "SelectList::should_skip_serializing")]
+ pub deps: SelectList<CrateDependency>,
+
+ // `extra_*` fields hold user-supplied additions from crate annotations.
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub extra_deps: BTreeSet<String>,
+
+ #[serde(skip_serializing_if = "SelectStringDict::should_skip_serializing")]
+ pub build_script_env: SelectStringDict,
+
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub extra_proc_macro_deps: BTreeSet<String>,
+
+ #[serde(skip_serializing_if = "SelectList::should_skip_serializing")]
+ pub proc_macro_deps: SelectList<CrateDependency>,
+
+ #[serde(skip_serializing_if = "SelectStringDict::should_skip_serializing")]
+ pub rustc_env: SelectStringDict,
+
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub rustc_flags: SelectStringList,
+
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub rustc_env_files: SelectStringList,
+
+ #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
+ pub tools: SelectStringList,
+
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub links: Option<String>,
+}
+
+impl Default for BuildScriptAttributes {
+ // A hand-written Default is required (over `#[derive(Default)]`) because
+ // `data_glob` defaults to a non-empty glob.
+ fn default() -> Self {
+ Self {
+ compile_data: Default::default(),
+ data: Default::default(),
+ // Build scripts include all sources by default
+ data_glob: BTreeSet::from(["**".to_owned()]),
+ deps: Default::default(),
+ extra_deps: Default::default(),
+ build_script_env: Default::default(),
+ extra_proc_macro_deps: Default::default(),
+ proc_macro_deps: Default::default(),
+ rustc_env: Default::default(),
+ rustc_flags: Default::default(),
+ rustc_env_files: Default::default(),
+ tools: Default::default(),
+ links: Default::default(),
+ }
+ }
+}
+
+/// All information needed to render the Bazel targets for a single crate.
+#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize, Clone)]
+#[serde(default)]
+pub struct CrateContext {
+ /// The package name of the current crate
+ pub name: String,
+
+ /// The full version of the current crate
+ pub version: String,
+
+ /// Optional source annotations if they were discoverable in the
+ /// lockfile. Workspace Members will not have source annotations and
+ /// potentially others.
+ pub repository: Option<SourceAnnotation>,
+
+ /// A list of all targets (lib, proc-macro, bin) associated with this package
+ pub targets: Vec<Rule>,
+
+ /// The name of the crate's root library target. This is the target that a dependent
+ /// would get if they were to depend on `{crate_name}`.
+ pub library_target_name: Option<String>,
+
+ /// A set of attributes common to most [Rule] types or target types.
+ pub common_attrs: CommonAttributes,
+
+ /// Optional attributes for build scripts. This field is only populated if
+ /// a build script (`custom-build`) target is defined for the crate.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub build_script_attrs: Option<BuildScriptAttributes>,
+
+ /// The license used by the crate
+ pub license: Option<String>,
+
+ /// Additional text to add to the generated BUILD file.
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub additive_build_file_content: Option<String>,
+}
+
+impl CrateContext {
+ /// Build a [CrateContext] for a single annotated crate, then apply any
+ /// user-provided overrides from `extras`.
+ pub fn new(
+ annotation: &CrateAnnotation,
+ packages: &BTreeMap<PackageId, Package>,
+ source_annotations: &BTreeMap<PackageId, SourceAnnotation>,
+ extras: &BTreeMap<CrateId, PairredExtras>,
+ include_build_scripts: bool,
+ ) -> Self {
+ let package: &Package = &packages[&annotation.node.id];
+ let current_crate_id = CrateId::new(package.name.clone(), package.version.to_string());
+
+ let new_crate_dep = |dep: Dependency| -> CrateDependency {
+ let pkg = &packages[&dep.package_id];
+
+ // Unfortunately, The package graph and resolve graph of cargo metadata have different representations
+ // for the crate names (resolve graph sanitizes names to match module names) so to get the rest of this
+ // content to align when rendering, the dependency target needs to be explicitly sanitized.
+ let target = sanitize_module_name(&dep.target_name);
+
+ CrateDependency {
+ id: CrateId::new(pkg.name.clone(), pkg.version.to_string()),
+ target,
+ alias: dep.alias,
+ }
+ };
+
+ // Convert the dependencies into renderable strings
+ let deps = annotation.deps.normal_deps.clone().map(new_crate_dep);
+ let deps_dev = annotation.deps.normal_dev_deps.clone().map(new_crate_dep);
+ let proc_macro_deps = annotation.deps.proc_macro_deps.clone().map(new_crate_dep);
+ let proc_macro_deps_dev = annotation
+ .deps
+ .proc_macro_dev_deps
+ .clone()
+ .map(new_crate_dep);
+
+ // Gather all "common" attributes
+ let mut common_attrs = CommonAttributes {
+ crate_features: annotation.node.features.iter().cloned().collect(),
+ deps,
+ deps_dev,
+ edition: package.edition.clone(),
+ proc_macro_deps,
+ proc_macro_deps_dev,
+ version: package.version.to_string(),
+ ..Default::default()
+ };
+
+ // Per-crate annotations may override the global build-script default.
+ let include_build_scripts =
+ Self::crate_includes_build_script(package, extras, include_build_scripts);
+
+ // Iterate over each target and produce a Bazel target for all supported "kinds"
+ let targets = Self::collect_targets(&annotation.node, packages, include_build_scripts);
+
+ // Parse the library crate name from the set of included targets
+ let library_target_name = {
+ let lib_targets: Vec<&TargetAttributes> = targets
+ .iter()
+ .filter_map(|t| match t {
+ Rule::ProcMacro(attrs) => Some(attrs),
+ Rule::Library(attrs) => Some(attrs),
+ _ => None,
+ })
+ .collect();
+
+ // TODO: There should only be at most 1 library target. This case
+ // should be handled in a more intelligent way.
+ assert!(lib_targets.len() <= 1);
+ lib_targets
+ .iter()
+ .last()
+ .map(|attr| attr.crate_name.clone())
+ };
+
+ // Gather any build-script related attributes
+ let build_script_target = targets.iter().find_map(|r| match r {
+ Rule::BuildScript(attr) => Some(attr),
+ _ => None,
+ });
+
+ let build_script_attrs = if let Some(target) = build_script_target {
+ // Track the build script dependency
+ common_attrs.deps.insert(
+ CrateDependency {
+ id: current_crate_id,
+ target: target.crate_name.clone(),
+ alias: None,
+ },
+ None,
+ );
+
+ let build_deps = annotation.deps.build_deps.clone().map(new_crate_dep);
+ let build_proc_macro_deps = annotation
+ .deps
+ .build_proc_macro_deps
+ .clone()
+ .map(new_crate_dep);
+
+ Some(BuildScriptAttributes {
+ deps: build_deps,
+ proc_macro_deps: build_proc_macro_deps,
+ links: package.links.clone(),
+ ..Default::default()
+ })
+ } else {
+ None
+ };
+
+ // Save the repository information for the current crate
+ let repository = source_annotations.get(&package.id).cloned();
+
+ // Identify the license type
+ let license = package.license.clone();
+
+ // Create the crate's context and apply extra settings
+ CrateContext {
+ name: package.name.clone(),
+ version: package.version.to_string(),
+ repository,
+ targets,
+ library_target_name,
+ common_attrs,
+ build_script_attrs,
+ license,
+ additive_build_file_content: None,
+ }
+ .with_overrides(extras)
+ }
+
+ /// Apply any user-provided overrides ("extras") matching this crate's id
+ /// to the generated context.
+ fn with_overrides(mut self, extras: &BTreeMap<CrateId, PairredExtras>) -> Self {
+ let id = CrateId::new(self.name.clone(), self.version.clone());
+
+ // Insert all overrides/extras
+ if let Some(pairred_override) = extras.get(&id) {
+ let crate_extra = &pairred_override.crate_extra;
+
+ // Deps
+ if let Some(extra) = &crate_extra.deps {
+ self.common_attrs.extra_deps = extra.clone();
+ }
+
+ // Proc macro deps
+ if let Some(extra) = &crate_extra.proc_macro_deps {
+ self.common_attrs.extra_proc_macro_deps = extra.clone();
+ }
+
+ // Compile data
+ if let Some(extra) = &crate_extra.compile_data {
+ for data in extra.iter() {
+ self.common_attrs.compile_data.insert(data.clone(), None);
+ }
+ }
+
+ // Compile data glob
+ if let Some(extra) = &crate_extra.compile_data_glob {
+ self.common_attrs.compile_data_glob.extend(extra.clone());
+ }
+
+ // Crate features
+ if let Some(extra) = &crate_extra.crate_features {
+ for data in extra.iter() {
+ self.common_attrs.crate_features.insert(data.clone());
+ }
+ }
+
+ // Data
+ if let Some(extra) = &crate_extra.data {
+ for data in extra.iter() {
+ self.common_attrs.data.insert(data.clone(), None);
+ }
+ }
+
+ // Data glob
+ if let Some(extra) = &crate_extra.data_glob {
+ self.common_attrs.data_glob.extend(extra.clone());
+ }
+
+ // Rustc flags
+ // TODO: SelectList is currently backed by `BTreeSet` which is generally incorrect
+ // for rustc flags. Should SelectList be refactored?
+ if let Some(extra) = &crate_extra.rustc_flags {
+ for data in extra.iter() {
+ self.common_attrs.rustc_flags.insert(data.clone(), None);
+ }
+ }
+
+ // Rustc env
+ if let Some(extra) = &crate_extra.rustc_env {
+ self.common_attrs.rustc_env.insert(extra.clone(), None);
+ }
+
+ // Rustc env files
+ if let Some(extra) = &crate_extra.rustc_env_files {
+ for data in extra.iter() {
+ self.common_attrs.rustc_env_files.insert(data.clone(), None);
+ }
+ }
+
+ // Build script Attributes
+ if let Some(attrs) = &mut self.build_script_attrs {
+ // Deps
+ if let Some(extra) = &crate_extra.build_script_deps {
+ attrs.extra_deps = extra.clone();
+ }
+
+ // Proc macro deps
+ if let Some(extra) = &crate_extra.build_script_proc_macro_deps {
+ attrs.extra_proc_macro_deps = extra.clone();
+ }
+
+ // Data
+ if let Some(extra) = &crate_extra.build_script_data {
+ for data in extra {
+ attrs.data.insert(data.clone(), None);
+ }
+ }
+
+ // Data glob
+ if let Some(extra) = &crate_extra.build_script_data_glob {
+ attrs.data_glob.extend(extra.clone());
+ }
+
+ // Rustc env
+ if let Some(extra) = &crate_extra.build_script_rustc_env {
+ attrs.rustc_env.insert(extra.clone(), None);
+ }
+
+ // Build script env
+ if let Some(extra) = &crate_extra.build_script_env {
+ attrs.build_script_env.insert(extra.clone(), None);
+ }
+ }
+
+ // Extra build contents
+ self.additive_build_file_content = crate_extra
+ .additive_build_file_content
+ .as_ref()
+ .map(|content| {
+ // For prettier rendering, dedent the build contents
+ textwrap::dedent(content)
+ });
+
+ // Git shallow_since
+ if let Some(SourceAnnotation::Git { shallow_since, .. }) = &mut self.repository {
+ *shallow_since = crate_extra.shallow_since.clone()
+ }
+
+ // Patch attributes
+ if let Some(repository) = &mut self.repository {
+ match repository {
+ SourceAnnotation::Git {
+ patch_args,
+ patch_tool,
+ patches,
+ ..
+ } => {
+ *patch_args = crate_extra.patch_args.clone();
+ *patch_tool = crate_extra.patch_tool.clone();
+ *patches = crate_extra.patches.clone();
+ }
+ SourceAnnotation::Http {
+ patch_args,
+ patch_tool,
+ patches,
+ ..
+ } => {
+ *patch_args = crate_extra.patch_args.clone();
+ *patch_tool = crate_extra.patch_tool.clone();
+ *patches = crate_extra.patches.clone();
+ }
+ }
+ }
+ }
+
+ self
+ }
+
+ /// Determine whether or not a crate __should__ include a build script
+ /// (build.rs) if it happens to have one.
+ fn crate_includes_build_script(
+ package: &Package,
+ overrides: &BTreeMap<CrateId, PairredExtras>,
+ default_generate_build_script: bool,
+ ) -> bool {
+ // Locate extra settings for the current package.
+ let settings = overrides
+ .iter()
+ .find(|(_, settings)| settings.package_id == package.id);
+
+ // If the crate has extra settings, which explicitly set `gen_build_script`, always use
+ // this value, otherwise, fallback to the provided default.
+ settings
+ .and_then(|(_, settings)| settings.crate_extra.gen_build_script)
+ .unwrap_or(default_generate_build_script)
+ }
+
+ /// Collect all Bazel targets that should be generated for a particular Package
+ fn collect_targets(
+ node: &Node,
+ packages: &BTreeMap<PackageId, Package>,
+ include_build_scripts: bool,
+ ) -> Vec<Rule> {
+ let package = &packages[&node.id];
+
+ let package_root = package
+ .manifest_path
+ .as_std_path()
+ .parent()
+ .expect("Every manifest should have a parent directory");
+
+ package
+ .targets
+ .iter()
+ .flat_map(|target| {
+ target
+ .kind
+ .iter()
+ .filter_map(|kind| {
+ // Unfortunately, The package graph and resolve graph of cargo metadata have different representations
+ // for the crate names (resolve graph sanitizes names to match module names) so to get the rest of this
+ // content to align when rendering, the package target names are always sanitized.
+ let crate_name = sanitize_module_name(&target.name);
+
+ // Locate the crate's root source file relative to the package root normalized for unix
+ let crate_root =
+ pathdiff::diff_paths(target.src_path.to_string(), package_root).map(
+ // Normalize the path so that it always renders the same regardless of platform
+ |root| root.to_string_lossy().replace("\\", "/"),
+ );
+
+ // Conditionally check to see if the target is a build script
+ if include_build_scripts && kind == "custom-build" {
+ return Some(Rule::BuildScript(TargetAttributes {
+ crate_name,
+ crate_root,
+ srcs: Glob::new_rust_srcs(),
+ }));
+ }
+
+ // Check to see if the target is a proc-macro
+ if kind == "proc-macro" {
+ return Some(Rule::ProcMacro(TargetAttributes {
+ crate_name,
+ crate_root,
+ srcs: Glob::new_rust_srcs(),
+ }));
+ }
+
+ // Check to see if the target is a library
+ if ["lib", "rlib"].contains(&kind.as_str()) {
+ return Some(Rule::Library(TargetAttributes {
+ crate_name,
+ crate_root,
+ srcs: Glob::new_rust_srcs(),
+ }));
+ }
+
+ // Check to see if the target is a binary
+ // (note: the binary keeps its unsanitized target name)
+ if kind == "bin" {
+ return Some(Rule::Binary(TargetAttributes {
+ crate_name: target.name.clone(),
+ crate_root,
+ srcs: Glob::new_rust_srcs(),
+ }));
+ }
+
+ None
+ })
+ .collect::<Vec<Rule>>()
+ })
+ .collect()
+ }
+}
+
#[cfg(test)]
mod test {
    use super::*;

    use crate::config::CrateAnnotations;
    use crate::metadata::Annotations;

    // Annotations built from the `common` test fixture (expected to contain a
    // library and a binary target — see the assertions below).
    fn common_annotations() -> Annotations {
        Annotations::new(
            crate::test::metadata::common(),
            crate::test::lockfile::common(),
            crate::config::Config::default(),
        )
        .unwrap()
    }

    // A context built with no extras exposes the fixture's library and binary targets.
    #[test]
    fn new_context() {
        let annotations = common_annotations();

        let crate_annotation = &annotations.metadata.crates[&PackageId {
            repr: "common 0.1.0 (path+file://{TEMP_DIR}/common)".to_owned(),
        }];

        let context = CrateContext::new(
            crate_annotation,
            &annotations.metadata.packages,
            &annotations.lockfile.crates,
            &annotations.pairred_extras,
            false,
        );

        assert_eq!(context.name, "common");
        assert_eq!(
            context.targets,
            vec![
                Rule::Library(TargetAttributes {
                    crate_name: "common".to_owned(),
                    crate_root: Some("lib.rs".to_owned()),
                    srcs: Glob::new_rust_srcs(),
                }),
                Rule::Binary(TargetAttributes {
                    crate_name: "common-bin".to_owned(),
                    crate_root: Some("main.rs".to_owned()),
                    srcs: Glob::new_rust_srcs(),
                }),
            ]
        );
    }

    // Per-crate annotations (here `data_glob`) surface in the rendered common
    // attributes without disturbing the target list.
    #[test]
    fn context_with_overrides() {
        let annotations = common_annotations();

        let package_id = PackageId {
            repr: "common 0.1.0 (path+file://{TEMP_DIR}/common)".to_owned(),
        };

        let crate_annotation = &annotations.metadata.crates[&package_id];

        let mut pairred_extras = BTreeMap::new();
        pairred_extras.insert(
            CrateId::new("common".to_owned(), "0.1.0".to_owned()),
            PairredExtras {
                package_id,
                crate_extra: CrateAnnotations {
                    data_glob: Some(BTreeSet::from(["**/data_glob/**".to_owned()])),
                    ..CrateAnnotations::default()
                },
            },
        );

        let context = CrateContext::new(
            crate_annotation,
            &annotations.metadata.packages,
            &annotations.lockfile.crates,
            &pairred_extras,
            false,
        );

        assert_eq!(context.name, "common");
        assert_eq!(
            context.targets,
            vec![
                Rule::Library(TargetAttributes {
                    crate_name: "common".to_owned(),
                    crate_root: Some("lib.rs".to_owned()),
                    srcs: Glob::new_rust_srcs(),
                }),
                Rule::Binary(TargetAttributes {
                    crate_name: "common-bin".to_owned(),
                    crate_root: Some("main.rs".to_owned()),
                    srcs: Glob::new_rust_srcs(),
                }),
            ]
        );
        assert_eq!(
            context.common_attrs.data_glob,
            BTreeSet::from(["**/data_glob/**".to_owned()])
        );
    }

    // Annotations for the `build_scripts` fixture (openssl-sys, which has a
    // `custom-build` target — see `context_with_build_script`).
    fn build_script_annotations() -> Annotations {
        Annotations::new(
            crate::test::metadata::build_scripts(),
            crate::test::lockfile::build_scripts(),
            crate::config::Config::default(),
        )
        .unwrap()
    }

    // Annotations for the `crate_types` fixture (sysinfo, an `rlib` crate).
    fn crate_type_annotations() -> Annotations {
        Annotations::new(
            crate::test::metadata::crate_types(),
            crate::test::lockfile::crate_types(),
            crate::config::Config::default(),
        )
        .unwrap()
    }

    // With build scripts enabled, a `custom-build` target produces a
    // `BuildScript` rule and populates `build_script_attrs`.
    #[test]
    fn context_with_build_script() {
        let annotations = build_script_annotations();

        let package_id = PackageId {
            repr: "openssl-sys 0.9.72 (registry+https://github.com/rust-lang/crates.io-index)"
                .to_owned(),
        };

        let crate_annotation = &annotations.metadata.crates[&package_id];

        let context = CrateContext::new(
            crate_annotation,
            &annotations.metadata.packages,
            &annotations.lockfile.crates,
            &annotations.pairred_extras,
            true,
        );

        assert_eq!(context.name, "openssl-sys");
        assert!(context.build_script_attrs.is_some());
        assert_eq!(
            context.targets,
            vec![
                Rule::Library(TargetAttributes {
                    crate_name: "openssl_sys".to_owned(),
                    crate_root: Some("src/lib.rs".to_owned()),
                    srcs: Glob::new_rust_srcs(),
                }),
                Rule::BuildScript(TargetAttributes {
                    crate_name: "build_script_main".to_owned(),
                    crate_root: Some("build/main.rs".to_owned()),
                    srcs: Glob::new_rust_srcs(),
                })
            ]
        );

        // Cargo build scripts should include all sources
        assert!(context.build_script_attrs.unwrap().data_glob.contains("**"));
    }

    // With build scripts disabled, the `custom-build` target is ignored entirely.
    #[test]
    fn context_disabled_build_script() {
        let annotations = build_script_annotations();

        let package_id = PackageId {
            repr: "openssl-sys 0.9.72 (registry+https://github.com/rust-lang/crates.io-index)"
                .to_owned(),
        };

        let crate_annotation = &annotations.metadata.crates[&package_id];

        let context = CrateContext::new(
            crate_annotation,
            &annotations.metadata.packages,
            &annotations.lockfile.crates,
            &annotations.pairred_extras,
            false,
        );

        assert_eq!(context.name, "openssl-sys");
        assert!(context.build_script_attrs.is_none());
        assert_eq!(
            context.targets,
            vec![Rule::Library(TargetAttributes {
                crate_name: "openssl_sys".to_owned(),
                crate_root: Some("src/lib.rs".to_owned()),
                srcs: Glob::new_rust_srcs(),
            })],
        );
    }

    // `rlib` crate types are treated as ordinary library targets.
    #[test]
    fn context_rlib_crate_type() {
        let annotations = crate_type_annotations();

        let package_id = PackageId {
            repr: "sysinfo 0.22.5 (registry+https://github.com/rust-lang/crates.io-index)"
                .to_owned(),
        };

        let crate_annotation = &annotations.metadata.crates[&package_id];

        let context = CrateContext::new(
            crate_annotation,
            &annotations.metadata.packages,
            &annotations.lockfile.crates,
            &annotations.pairred_extras,
            false,
        );

        assert_eq!(context.name, "sysinfo");
        assert!(context.build_script_attrs.is_none());
        assert_eq!(
            context.targets,
            vec![Rule::Library(TargetAttributes {
                crate_name: "sysinfo".to_owned(),
                crate_root: Some("src/lib.rs".to_owned()),
                srcs: Glob::new_rust_srcs(),
            })],
        );
    }
}
diff --git a/crate_universe/src/context/platforms.rs b/crate_universe/src/context/platforms.rs
new file mode 100644
index 0000000..724dbd0
--- /dev/null
+++ b/crate_universe/src/context/platforms.rs
@@ -0,0 +1,269 @@
+use std::collections::{BTreeMap, BTreeSet, HashMap};
+
+use anyhow::{anyhow, Context, Result};
+use cfg_expr::targets::{get_builtin_target_by_triple, TargetInfo};
+use cfg_expr::{Expression, Predicate};
+
+use crate::context::CrateContext;
+use crate::utils::starlark::Select;
+
+/// Walk through all dependencies in a [CrateContext] list for all configuration specific
+/// dependencies to produce a mapping of configuration to compatible platform triples.
+pub fn resolve_cfg_platforms(
+ crates: Vec<&CrateContext>,
+ supported_platform_triples: &BTreeSet<String>,
+) -> Result<BTreeMap<String, BTreeSet<String>>> {
+ // Collect all unique configurations from all dependencies into a single set
+ let configurations: BTreeSet<String> = crates
+ .iter()
+ .flat_map(|ctx| {
+ let attr = &ctx.common_attrs;
+ attr.deps
+ .configurations()
+ .into_iter()
+ .chain(attr.deps_dev.configurations().into_iter())
+ .chain(attr.proc_macro_deps.configurations().into_iter())
+ .chain(attr.proc_macro_deps_dev.configurations().into_iter())
+ // Chain the build dependencies if some are defined
+ .chain(if let Some(attr) = &ctx.build_script_attrs {
+ attr.deps
+ .configurations()
+ .into_iter()
+ .chain(attr.proc_macro_deps.configurations().into_iter())
+ .collect::<BTreeSet<Option<&String>>>()
+ .into_iter()
+ } else {
+ BTreeSet::new().into_iter()
+ })
+ .flatten()
+ })
+ .cloned()
+ .collect();
+
+ // Generate target information for each triple string
+ let target_infos = supported_platform_triples
+ .iter()
+ .map(|t| match get_builtin_target_by_triple(t) {
+ Some(info) => Ok(info),
+ None => Err(anyhow!(
+ "Invalid platform triple in supported platforms: {}",
+ t
+ )),
+ })
+ .collect::<Result<Vec<&'static TargetInfo>>>()?;
+
+ // `cfg-expr` does not understand configurations that are simply platform triples
+ // (`x86_64-unknown-linux-gun` vs `cfg(target = "x86_64-unkonwn-linux-gnu")`). So
+ // in order to parse configurations, the text is renamed for the check but the
+ // original is retained for comaptibility with the manifest.
+ let rename = |cfg: &str| -> String { format!("cfg(target = \"{}\")", cfg) };
+ let original_cfgs: HashMap<String, String> = configurations
+ .iter()
+ .filter(|cfg| !cfg.starts_with("cfg("))
+ .map(|cfg| (rename(cfg), cfg.clone()))
+ .collect();
+
+ configurations
+ .into_iter()
+ // `cfg-expr` requires that the expressions be actual `cfg` expressions. Any time
+ // there's a target triple (which is a valid constraint), convert it to a cfg expression.
+ .map(|cfg| match cfg.starts_with("cfg(") {
+ true => cfg.to_string(),
+ false => rename(&cfg),
+ })
+ // Check the current configuration with against each supported triple
+ .map(|cfg| {
+ let expression = Expression::parse(&cfg)
+ .context(format!("Failed to parse expression: '{}'", cfg))?;
+
+ let triples = target_infos
+ .iter()
+ .filter(|info| {
+ expression.eval(|p| match p {
+ Predicate::Target(tp) => tp.matches(**info),
+ Predicate::KeyValue { key, val } => {
+ *key == "target" && val == &info.triple.as_str()
+ }
+ // For now there is no other kind of matching
+ _ => false,
+ })
+ })
+ .map(|info| info.triple.to_string())
+ .collect();
+
+ // Map any renamed configurations back to their original IDs
+ let cfg = match original_cfgs.get(&cfg) {
+ Some(orig) => orig.clone(),
+ None => cfg,
+ };
+
+ Ok((cfg, triples))
+ })
+ .collect()
+}
+
#[cfg(test)]
mod test {
    use crate::config::CrateId;
    use crate::context::crate_context::CrateDependency;
    use crate::context::CommonAttributes;
    use crate::utils::starlark::SelectList;

    use super::*;

    // The set of platform triples these tests resolve configurations against.
    fn supported_platform_triples() -> BTreeSet<String> {
        BTreeSet::from([
            "aarch64-apple-darwin".to_owned(),
            "aarch64-apple-ios".to_owned(),
            "aarch64-linux-android".to_owned(),
            "aarch64-unknown-linux-gnu".to_owned(),
            "arm-unknown-linux-gnueabi".to_owned(),
            "armv7-unknown-linux-gnueabi".to_owned(),
            "i686-apple-darwin".to_owned(),
            "i686-linux-android".to_owned(),
            "i686-pc-windows-msvc".to_owned(),
            "i686-unknown-freebsd".to_owned(),
            "i686-unknown-linux-gnu".to_owned(),
            "powerpc-unknown-linux-gnu".to_owned(),
            "s390x-unknown-linux-gnu".to_owned(),
            "wasm32-unknown-unknown".to_owned(),
            "wasm32-wasi".to_owned(),
            "x86_64-apple-darwin".to_owned(),
            "x86_64-apple-ios".to_owned(),
            "x86_64-linux-android".to_owned(),
            "x86_64-pc-windows-msvc".to_owned(),
            "x86_64-unknown-freebsd".to_owned(),
            "x86_64-unknown-linux-gnu".to_owned(),
        ])
    }

    // A dependency with no configuration yields no platform mappings at all.
    #[test]
    fn resolve_no_targeted() {
        let mut deps = SelectList::default();
        deps.insert(
            CrateDependency {
                id: CrateId::new("mock_crate_b".to_owned(), "0.1.0".to_owned()),
                target: "mock_crate_b".to_owned(),
                alias: None,
            },
            None,
        );

        let context = CrateContext {
            name: "mock_crate_a".to_owned(),
            version: "0.1.0".to_owned(),
            common_attrs: CommonAttributes {
                deps,
                ..CommonAttributes::default()
            },
            ..CrateContext::default()
        };

        let configurations =
            resolve_cfg_platforms(vec![&context], &supported_platform_triples()).unwrap();

        assert_eq!(configurations, BTreeMap::new(),)
    }

    // A `cfg(target = "...")` configuration resolves to exactly that triple.
    #[test]
    fn resolve_targeted() {
        let configuration = r#"cfg(target = "x86_64-unknown-linux-gnu")"#.to_owned();
        let mut deps = SelectList::default();
        deps.insert(
            CrateDependency {
                id: CrateId::new("mock_crate_b".to_owned(), "0.1.0".to_owned()),
                target: "mock_crate_b".to_owned(),
                alias: None,
            },
            Some(configuration.clone()),
        );

        let context = CrateContext {
            name: "mock_crate_a".to_owned(),
            version: "0.1.0".to_owned(),
            common_attrs: CommonAttributes {
                deps,
                ..CommonAttributes::default()
            },
            ..CrateContext::default()
        };

        let configurations =
            resolve_cfg_platforms(vec![&context], &supported_platform_triples()).unwrap();

        assert_eq!(
            configurations,
            BTreeMap::from([(
                configuration,
                BTreeSet::from(["x86_64-unknown-linux-gnu".to_owned()])
            )])
        );
    }

    // A bare platform triple used as a configuration resolves to itself
    // (exercises the internal `cfg(target = ...)` rename round-trip).
    #[test]
    fn resolve_platforms() {
        let configuration = r#"x86_64-unknown-linux-gnu"#.to_owned();
        let mut deps = SelectList::default();
        deps.insert(
            CrateDependency {
                id: CrateId::new("mock_crate_b".to_owned(), "0.1.0".to_owned()),
                target: "mock_crate_b".to_owned(),
                alias: None,
            },
            Some(configuration.clone()),
        );

        let context = CrateContext {
            name: "mock_crate_a".to_owned(),
            version: "0.1.0".to_owned(),
            common_attrs: CommonAttributes {
                deps,
                ..CommonAttributes::default()
            },
            ..CrateContext::default()
        };

        let configurations =
            resolve_cfg_platforms(vec![&context], &supported_platform_triples()).unwrap();

        assert_eq!(
            configurations,
            BTreeMap::from([(
                configuration,
                BTreeSet::from(["x86_64-unknown-linux-gnu".to_owned()])
            )])
        );
    }

    // A configuration naming a triple outside the supported set resolves to an
    // empty set of platforms (not an error).
    #[test]
    fn resolve_unsupported_targeted() {
        let configuration = r#"cfg(target = "x86_64-unknown-unknown")"#.to_owned();
        let mut deps = SelectList::default();
        deps.insert(
            CrateDependency {
                id: CrateId::new("mock_crate_b".to_owned(), "0.1.0".to_owned()),
                target: "mock_crate_b".to_owned(),
                alias: None,
            },
            Some(configuration.clone()),
        );

        let context = CrateContext {
            name: "mock_crate_a".to_owned(),
            version: "0.1.0".to_owned(),
            common_attrs: CommonAttributes {
                deps,
                ..CommonAttributes::default()
            },
            ..CrateContext::default()
        };

        let configurations =
            resolve_cfg_platforms(vec![&context], &supported_platform_triples()).unwrap();

        assert_eq!(
            configurations,
            BTreeMap::from([(configuration, BTreeSet::new())])
        );
    }
}
diff --git a/crate_universe/src/lib.rs b/crate_universe/src/lib.rs
new file mode 100644
index 0000000..ae85717
--- /dev/null
+++ b/crate_universe/src/lib.rs
@@ -0,0 +1,12 @@
// Public command line interface for the `cargo-bazel` binary.
pub mod cli;

// Internal implementation modules; only `cli` is exposed to consumers.
mod config;
mod context;
mod lockfile;
mod metadata;
mod rendering;
mod splicing;
mod utils;

// Shared test fixtures and helpers.
#[cfg(test)]
mod test;
diff --git a/crate_universe/src/lockfile.rs b/crate_universe/src/lockfile.rs
new file mode 100644
index 0000000..91f4832
--- /dev/null
+++ b/crate_universe/src/lockfile.rs
@@ -0,0 +1,423 @@
//! Utility module for interacting with different kinds of lock files
+
+use std::convert::TryFrom;
+use std::ffi::OsStr;
+use std::fs;
+use std::path::Path;
+use std::process::Command;
+use std::str::FromStr;
+
+use anyhow::{bail, Context as AnyhowContext, Result};
+use hex::ToHex;
+use serde::{Deserialize, Serialize};
+use sha2::{Digest as Sha2Digest, Sha256};
+
+use crate::config::Config;
+use crate::context::Context;
+use crate::splicing::{SplicingManifest, SplicingMetadata};
+
/// The lockfile formats `cargo-bazel` can consume.
#[derive(Debug)]
pub enum LockfileKind {
    /// Determine the format by inspecting the file's content (see [LockfileKind::detect]).
    Auto,
    /// A `cargo-bazel` lockfile: a JSON serialized [Context].
    Bazel,
    /// A standard `Cargo.lock` file.
    Cargo,
}
+
+impl LockfileKind {
+ pub fn detect(path: &Path) -> Result<Self> {
+ let content = fs::read_to_string(path)?;
+
+ if serde_json::from_str::<Context>(&content).is_ok() {
+ return Ok(Self::Bazel);
+ }
+
+ if cargo_lock::Lockfile::from_str(&content).is_ok() {
+ return Ok(Self::Cargo);
+ }
+
+ bail!("Unknown Lockfile kind for {}", path.display())
+ }
+}
+
+impl FromStr for LockfileKind {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let lower = s.to_lowercase();
+ if lower == "auto" {
+ return Ok(Self::Auto);
+ }
+
+ if lower == "bazel" {
+ return Ok(Self::Bazel);
+ }
+
+ if lower == "cargo" {
+ return Ok(Self::Cargo);
+ }
+
+ bail!("Unknown LockfileKind: '{}'", s)
+ }
+}
+
+pub fn is_cargo_lockfile(path: &Path, kind: &LockfileKind) -> bool {
+ match kind {
+ LockfileKind::Auto => match LockfileKind::detect(path) {
+ Ok(kind) => matches!(kind, LockfileKind::Cargo),
+ Err(_) => false,
+ },
+ LockfileKind::Bazel => false,
+ LockfileKind::Cargo => true,
+ }
+}
+
+pub fn lock_context(
+ mut context: Context,
+ config: &Config,
+ splicing_manifest: &SplicingManifest,
+ cargo_bin: &Path,
+ rustc_bin: &Path,
+) -> Result<Context> {
+ // Ensure there is no existing checksum which could impact the lockfile results
+ context.checksum = None;
+
+ let checksum = Digest::new(&context, config, splicing_manifest, cargo_bin, rustc_bin)
+ .context("Failed to generate context digest")?;
+
+ Ok(Context {
+ checksum: Some(checksum),
+ ..context
+ })
+}
+
/// Write a [Context] lockfile (pretty-printed JSON, trailing newline) to `path`.
///
/// When `dry_run` is set, the rendered content is printed to stdout instead of
/// being written to disk; parent directories are created as needed otherwise.
pub fn write_lockfile(lockfile: Context, path: &Path, dry_run: bool) -> Result<()> {
    let content = serde_json::to_string_pretty(&lockfile)?;

    if dry_run {
        // NOTE(review): `{:#?}` on a `String` prints it quoted/escaped; if raw
        // JSON output is intended here, `{}` would be the fix — confirm.
        println!("{:#?}", content);
    } else {
        // Ensure the parent directory exists
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent)?;
        }
        fs::write(path, content + "\n")
            .context(format!("Failed to write file to disk: {}", path.display()))?;
    }

    Ok(())
}
+
/// A hex-encoded sha256 digest of all inputs that affect a generated lockfile
/// (context, config, splicing metadata, and tool versions).
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct Digest(String);
+
impl Digest {
    /// Compute a digest over `context`, `config`, the splicing metadata derived
    /// from `splicing_manifest`, and the versions of the given cargo/rustc binaries.
    ///
    /// # Errors
    ///
    /// Fails if splicing metadata cannot be derived or either binary's version
    /// cannot be queried.
    pub fn new(
        context: &Context,
        config: &Config,
        splicing_manifest: &SplicingManifest,
        cargo_bin: &Path,
        rustc_bin: &Path,
    ) -> Result<Self> {
        let splicing_metadata = SplicingMetadata::try_from((*splicing_manifest).clone())?;
        let cargo_version = Self::bin_version(cargo_bin)?;
        let rustc_version = Self::bin_version(rustc_bin)?;
        let cargo_bazel_version = env!("CARGO_PKG_VERSION");

        // Ensure the checksum of a digest is not present before computing one
        Ok(match context.checksum {
            Some(_) => Self::compute(
                &Context {
                    checksum: None,
                    ..context.clone()
                },
                config,
                &splicing_metadata,
                cargo_bazel_version,
                &cargo_version,
                &rustc_version,
            ),
            None => Self::compute(
                context,
                config,
                &splicing_metadata,
                cargo_bazel_version,
                &cargo_version,
                &rustc_version,
            ),
        })
    }

    // Fold each input into a sha256 hash, NUL-separated so adjacent fields can't
    // run together. NOTE: the field order and serialization here are load-bearing;
    // any change invalidates every existing digest (see the hard-coded values in
    // the tests below).
    fn compute(
        context: &Context,
        config: &Config,
        splicing_metadata: &SplicingMetadata,
        cargo_bazel_version: &str,
        cargo_version: &str,
        rustc_version: &str,
    ) -> Self {
        // Since this method is private, it should be expected that context is
        // always None. This then allows us to have this method not return a
        // Result.
        debug_assert!(context.checksum.is_none());

        let mut hasher = Sha256::new();

        hasher.update(cargo_bazel_version.as_bytes());
        hasher.update(b"\0");

        hasher.update(serde_json::to_string(context).unwrap().as_bytes());
        hasher.update(b"\0");

        hasher.update(serde_json::to_string(config).unwrap().as_bytes());
        hasher.update(b"\0");

        hasher.update(serde_json::to_string(splicing_metadata).unwrap().as_bytes());
        hasher.update(b"\0");

        hasher.update(cargo_version.as_bytes());
        hasher.update(b"\0");

        hasher.update(rustc_version.as_bytes());
        hasher.update(b"\0");

        Self(hasher.finalize().encode_hex::<String>())
    }

    // Run `<binary> --version` in a scrubbed environment (only HOMEDRIVE and
    // PATHEXT are forwarded) so host state can't perturb the reported version.
    fn bin_version(binary: &Path) -> Result<String> {
        let safe_vars = [OsStr::new("HOMEDRIVE"), OsStr::new("PATHEXT")];
        let env = std::env::vars_os().filter(|(var, _)| safe_vars.contains(&var.as_os_str()));

        let output = Command::new(binary)
            .arg("--version")
            .env_clear()
            .envs(env)
            .output()?;

        if !output.status.success() {
            // NOTE(review): this message says "cargo" even when probing rustc —
            // consider including `binary` in the message.
            bail!("Failed to query cargo version")
        }

        let version = String::from_utf8(output.stdout)?;
        Ok(version)
    }
}
+
+impl PartialEq<str> for Digest {
+ fn eq(&self, other: &str) -> bool {
+ self.0 == other
+ }
+}
+
+impl PartialEq<String> for Digest {
+ fn eq(&self, other: &String) -> bool {
+ &self.0 == other
+ }
+}
+
#[cfg(test)]
mod test {
    use crate::config::{CrateAnnotations, CrateId};
    use crate::splicing::cargo_config::{AdditionalRegistry, CargoConfig, Registry};

    use super::*;

    use std::collections::{BTreeMap, BTreeSet};
    use std::fs;

    // NOTE: the hard-coded hex digests below pin the hashing scheme. They must
    // be regenerated whenever any digest input or its serialization changes.

    // Digest of entirely default inputs.
    #[test]
    fn simple_digest() {
        let context = Context::default();
        let config = Config::default();
        let splicing_metadata = SplicingMetadata::default();

        let digest = Digest::compute(
            &context,
            &config,
            &splicing_metadata,
            "0.1.0",
            "cargo 1.57.0 (b2e52d7ca 2021-10-21)",
            "rustc 1.57.0 (f1edd0429 2021-11-29)",
        );

        assert_eq!(
            digest,
            Digest("4c8bc5de2d6d7acc7997ae9870e52bc0f0fcbc2b94076e61162078be6a69cc3b".to_owned())
        );
    }

    // Changes to the `Config` (annotations, platforms, flags) must change the digest.
    #[test]
    fn digest_with_config() {
        let context = Context::default();
        let config = Config {
            generate_build_scripts: false,
            annotations: BTreeMap::from([(
                CrateId::new("rustonomicon".to_owned(), "1.0.0".to_owned()),
                CrateAnnotations {
                    compile_data_glob: Some(BTreeSet::from(["arts/**".to_owned()])),
                    ..CrateAnnotations::default()
                },
            )]),
            cargo_config: None,
            supported_platform_triples: BTreeSet::from([
                "aarch64-apple-darwin".to_owned(),
                "aarch64-unknown-linux-gnu".to_owned(),
                "wasm32-unknown-unknown".to_owned(),
                "wasm32-wasi".to_owned(),
                "x86_64-apple-darwin".to_owned(),
                "x86_64-pc-windows-msvc".to_owned(),
                "x86_64-unknown-freebsd".to_owned(),
                "x86_64-unknown-linux-gnu".to_owned(),
            ]),
            ..Config::default()
        };

        let splicing_metadata = SplicingMetadata::default();

        let digest = Digest::compute(
            &context,
            &config,
            &splicing_metadata,
            "0.1.0",
            "cargo 1.57.0 (b2e52d7ca 2021-10-21)",
            "rustc 1.57.0 (f1edd0429 2021-11-29)",
        );

        assert_eq!(
            digest,
            Digest("7a0d2f5fce05c4d433826b5c4748bec7b125b79182de598dc700e893e09077e9".to_owned())
        );
    }

    // Changes to the splicing metadata (direct packages) must change the digest.
    #[test]
    fn digest_with_splicing_metadata() {
        let context = Context::default();
        let config = Config::default();
        let splicing_metadata = SplicingMetadata {
            direct_packages: BTreeMap::from([(
                "rustonomicon".to_owned(),
                cargo_toml::DependencyDetail {
                    version: Some("1.0.0".to_owned()),
                    ..cargo_toml::DependencyDetail::default()
                },
            )]),
            manifests: BTreeMap::new(),
            cargo_config: None,
        };

        let digest = Digest::compute(
            &context,
            &config,
            &splicing_metadata,
            "0.1.0",
            "cargo 1.57.0 (b2e52d7ca 2021-10-21)",
            "rustc 1.57.0 (f1edd0429 2021-11-29)",
        );

        assert_eq!(
            digest,
            Digest("fb5d7854dae366d4a9ff135208c28f08c14c2608dd6c5aa1b35b6e677dd53c06".to_owned())
        );
    }

    // Changes to a user-provided Cargo config (registries) must change the digest.
    #[test]
    fn digest_with_cargo_config() {
        let context = Context::default();
        let config = Config::default();
        let cargo_config = CargoConfig {
            registries: BTreeMap::from([
                (
                    "art-crates-remote".to_owned(),
                    AdditionalRegistry {
                        index: "https://artprod.mycompany/artifactory/git/cargo-remote.git"
                            .to_owned(),
                        token: None,
                    },
                ),
                (
                    "crates-io".to_owned(),
                    AdditionalRegistry {
                        index: "https://github.com/rust-lang/crates.io-index".to_owned(),
                        token: None,
                    },
                ),
            ]),
            registry: Registry {
                default: "art-crates-remote".to_owned(),
                token: None,
            },
            source: BTreeMap::new(),
        };

        let splicing_metadata = SplicingMetadata {
            cargo_config: Some(cargo_config),
            ..SplicingMetadata::default()
        };

        let digest = Digest::compute(
            &context,
            &config,
            &splicing_metadata,
            "0.1.0",
            "cargo 1.57.0 (b2e52d7ca 2021-10-21)",
            "rustc 1.57.0 (f1edd0429 2021-11-29)",
        );

        assert_eq!(
            digest,
            Digest("2b32833e4265bce03df70dbb9c2b32a78879cc02fbe88a481e3fe4a17812aca9".to_owned())
        );
    }

    // A serialized default `Context` must be detected as a Bazel lockfile.
    #[test]
    fn detect_bazel_lockfile() {
        let temp_dir = tempfile::tempdir().unwrap();
        let lockfile = temp_dir.as_ref().join("lockfile");
        fs::write(
            &lockfile,
            serde_json::to_string(&crate::context::Context::default()).unwrap(),
        )
        .unwrap();

        let kind = LockfileKind::detect(&lockfile).unwrap();
        assert!(matches!(kind, LockfileKind::Bazel));
    }

    // A minimal `Cargo.lock` body must be detected as a Cargo lockfile.
    #[test]
    fn detect_cargo_lockfile() {
        let temp_dir = tempfile::tempdir().unwrap();
        let lockfile = temp_dir.as_ref().join("lockfile");
        fs::write(
            &lockfile,
            textwrap::dedent(
                r#"
                version = 3

                [[package]]
                name = "detect"
                version = "0.1.0"
                "#,
            ),
        )
        .unwrap();

        let kind = LockfileKind::detect(&lockfile).unwrap();
        assert!(matches!(kind, LockfileKind::Cargo));
    }

    // Content matching neither format is an error, not a silent default.
    #[test]
    fn detect_invalid_lockfile() {
        let temp_dir = tempfile::tempdir().unwrap();
        let lockfile = temp_dir.as_ref().join("lockfile");
        fs::write(&lockfile, "]} invalid {[").unwrap();

        assert!(LockfileKind::detect(&lockfile).is_err());
    }

    // A missing file is an error rather than a panic.
    #[test]
    fn detect_missing_lockfile() {
        let temp_dir = tempfile::tempdir().unwrap();
        let lockfile = temp_dir.as_ref().join("lockfile");
        assert!(LockfileKind::detect(&lockfile).is_err());
    }
}
diff --git a/crate_universe/src/main.rs b/crate_universe/src/main.rs
new file mode 100644
index 0000000..0a79db4
--- /dev/null
+++ b/crate_universe/src/main.rs
@@ -0,0 +1,15 @@
+//! The `cargo->bazel` binary's entrypoint
+
+use cargo_bazel::cli;
+
+fn main() -> cli::Result<()> {
+ // Parse arguments
+ let opt = cli::parse_args();
+
+ match opt {
+ cli::Options::Generate(opt) => cli::generate(opt),
+ cli::Options::Splice(opt) => cli::splice(opt),
+ cli::Options::Query(opt) => cli::query(opt),
+ cli::Options::Vendor(opt) => cli::vendor(opt),
+ }
+}
diff --git a/crate_universe/src/metadata.rs b/crate_universe/src/metadata.rs
new file mode 100644
index 0000000..0e48676
--- /dev/null
+++ b/crate_universe/src/metadata.rs
@@ -0,0 +1,241 @@
+//! Tools for gathering various kinds of metadata (Cargo.lock, Cargo metadata, Crate Index info).
+
+mod dependency;
+mod metadata_annotation;
+
+use std::env;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use anyhow::{bail, Context, Result};
+use cargo_lock::Lockfile as CargoLockfile;
+use cargo_metadata::{Metadata as CargoMetadata, MetadataCommand};
+
+pub use self::dependency::*;
+pub use self::metadata_annotation::*;
+
// TODO: This should also return a set of [crate-index::IndexConfig]s for packages in metadata.packages
/// A Trait for generating metadata (`cargo metadata` output and a lock file) from a Cargo manifest.
pub trait MetadataGenerator {
    /// Produce the parsed `cargo metadata` output and the associated `Cargo.lock`
    /// for the manifest at `manifest_path`.
    fn generate<T: AsRef<Path>>(&self, manifest_path: T) -> Result<(CargoMetadata, CargoLockfile)>;
}
+
/// Generates Cargo metadata and a lockfile from a provided manifest.
///
/// Constructed via [Generator::new] and customized with the `with_*` setters.
pub struct Generator {
    /// The path to a `cargo` binary
    cargo_bin: PathBuf,

    /// The path to a `rustc` binary
    rustc_bin: PathBuf,
}
+
+impl Generator {
+ pub fn new() -> Self {
+ Generator {
+ cargo_bin: PathBuf::from(env::var("CARGO").unwrap_or_else(|_| "cargo".to_string())),
+ rustc_bin: PathBuf::from(env::var("RUSTC").unwrap_or_else(|_| "rustc".to_string())),
+ }
+ }
+
+ pub fn with_cargo(mut self, cargo_bin: PathBuf) -> Self {
+ self.cargo_bin = cargo_bin;
+ self
+ }
+
+ pub fn with_rustc(mut self, rustc_bin: PathBuf) -> Self {
+ self.rustc_bin = rustc_bin;
+ self
+ }
+}
+
impl MetadataGenerator for Generator {
    fn generate<T: AsRef<Path>>(&self, manifest_path: T) -> Result<(CargoMetadata, CargoLockfile)> {
        let manifest_dir = manifest_path
            .as_ref()
            .parent()
            .expect("The manifest should have a parent directory");
        // An existing lockfile is required up front: metadata is generated with
        // `--locked` below, so results stay reproducible.
        let lockfile = {
            let lock_path = manifest_dir.join("Cargo.lock");
            if !lock_path.exists() {
                bail!("No `Cargo.lock` file was found with the given manifest")
            }
            cargo_lock::Lockfile::load(lock_path)?
        };

        let metadata = MetadataCommand::new()
            .cargo_path(&self.cargo_bin)
            // presumably run from the manifest dir so user Cargo config files
            // are picked up (matches LockGenerator) — confirm
            .current_dir(manifest_dir)
            .manifest_path(manifest_path.as_ref())
            .other_options(["--locked".to_owned()])
            .exec()?;

        Ok((metadata, lockfile))
    }
}
+
/// A generator which produces or refreshes a `Cargo.lock` file for a manifest
/// by invoking `cargo` (see [LockGenerator::generate]).
pub struct LockGenerator {
    /// The path to a `cargo` binary
    cargo_bin: PathBuf,

    /// The path to a `rustc` binary
    rustc_bin: PathBuf,
}
+
+impl LockGenerator {
+ pub fn new(cargo_bin: PathBuf, rustc_bin: PathBuf) -> Self {
+ Self {
+ cargo_bin,
+ rustc_bin,
+ }
+ }
+
+ pub fn generate(
+ &self,
+ manifest_path: &Path,
+ existing_lock: &Option<PathBuf>,
+ ) -> Result<cargo_lock::Lockfile> {
+ let manifest_dir = manifest_path.parent().unwrap();
+ let generated_lockfile_path = manifest_dir.join("Cargo.lock");
+
+ let output = if let Some(lock) = existing_lock {
+ if !lock.exists() {
+ bail!(
+ "An existing lockfile path was provided but a file at '{}' does not exist",
+ lock.display()
+ )
+ }
+
+ // Install the file into the target location
+ if generated_lockfile_path.exists() {
+ fs::remove_file(&generated_lockfile_path)?;
+ }
+ fs::copy(&lock, &generated_lockfile_path)?;
+
+ // Ensure the Cargo cache is up to date to simulate the behavior
+ // of having just generated a new one
+ Command::new(&self.cargo_bin)
+ // Cargo detects config files based on `pwd` when running so
+ // to ensure user provided Cargo config files are used, it's
+ // critical to set the working directory to the manifest dir.
+ .current_dir(manifest_dir)
+ .arg("fetch")
+ .arg("--locked")
+ .arg("--manifest-path")
+ .arg(manifest_path)
+ .env("RUSTC", &self.rustc_bin)
+ .output()
+ .context(format!(
+ "Error running cargo to fetch crates '{}'",
+ manifest_path.display()
+ ))?
+ } else {
+ // Simply invoke `cargo generate-lockfile`
+ Command::new(&self.cargo_bin)
+ // Cargo detects config files based on `pwd` when running so
+ // to ensure user provided Cargo config files are used, it's
+ // critical to set the working directory to the manifest dir.
+ .current_dir(manifest_dir)
+ .arg("generate-lockfile")
+ .arg("--manifest-path")
+ .arg(manifest_path)
+ .env("RUSTC", &self.rustc_bin)
+ .output()
+ .context(format!(
+ "Error running cargo to generate lockfile '{}'",
+ manifest_path.display()
+ ))?
+ };
+
+ if !output.status.success() {
+ eprintln!("{}", String::from_utf8_lossy(&output.stdout));
+ eprintln!("{}", String::from_utf8_lossy(&output.stderr));
+ bail!(format!("Failed to generate lockfile: {}", output.status))
+ }
+
+ cargo_lock::Lockfile::load(&generated_lockfile_path).context(format!(
+ "Failed to load lockfile: {}",
+ generated_lockfile_path.display()
+ ))
+ }
+}
+
/// A generator which runs `cargo vendor` on a given manifest
pub struct VendorGenerator {
    /// The path to a `cargo` binary
    cargo_bin: PathBuf,

    /// The path to a `rustc` binary (forwarded to cargo via the `RUSTC` env var)
    rustc_bin: PathBuf,
}
+
impl VendorGenerator {
    pub fn new(cargo_bin: PathBuf, rustc_bin: PathBuf) -> Self {
        Self {
            cargo_bin,
            rustc_bin,
        }
    }

    /// Run `cargo vendor --locked --versioned-dirs` for `manifest_path`,
    /// vendoring sources into `output_dir`.
    ///
    /// # Errors
    ///
    /// Fails if cargo cannot be spawned or exits non-zero.
    pub fn generate(&self, manifest_path: &Path, output_dir: &Path) -> Result<()> {
        let manifest_dir = manifest_path.parent().unwrap();

        // Simply invoke `cargo vendor`
        let output = Command::new(&self.cargo_bin)
            // Cargo detects config files based on `pwd` when running so
            // to ensure user provided Cargo config files are used, it's
            // critical to set the working directory to the manifest dir.
            .current_dir(manifest_dir)
            .arg("vendor")
            .arg("--manifest-path")
            .arg(manifest_path)
            .arg("--locked")
            .arg("--versioned-dirs")
            .arg(output_dir)
            .env("RUSTC", &self.rustc_bin)
            .output()
            .with_context(|| {
                format!(
                    "Error running cargo to vendor sources for manifest '{}'",
                    manifest_path.display()
                )
            })?;

        // Surface cargo's own output before failing so the root cause is visible.
        if !output.status.success() {
            eprintln!("{}", String::from_utf8_lossy(&output.stdout));
            eprintln!("{}", String::from_utf8_lossy(&output.stderr));
            bail!(format!("Failed to vendor sources with: {}", output.status))
        }

        Ok(())
    }
}
+
+/// A helper function for writing Cargo metadata to a file.
+pub fn write_metadata(path: &Path, metadata: &cargo_metadata::Metadata) -> Result<()> {
+ let content =
+ serde_json::to_string_pretty(metadata).context("Failed to serialize Cargo Metadata")?;
+
+ fs::write(path, content).context("Failed to write metadata to disk")
+}
+
+/// A helper function for deserializing Cargo metadata and lockfiles
+pub fn load_metadata(
+ metadata_path: &Path,
+ lockfile_path: Option<&Path>,
+) -> Result<(cargo_metadata::Metadata, cargo_lock::Lockfile)> {
+ let content = fs::read_to_string(metadata_path)
+ .with_context(|| format!("Failed to load Cargo Metadata: {}", metadata_path.display()))?;
+
+ let metadata =
+ serde_json::from_str(&content).context("Unable to deserialize Cargo metadata")?;
+
+ let lockfile_path = lockfile_path
+ .map(PathBuf::from)
+ .unwrap_or_else(|| metadata_path.parent().unwrap().join("Cargo.lock"));
+
+ let lockfile = cargo_lock::Lockfile::load(&lockfile_path)
+ .with_context(|| format!("Failed to load lockfile: {}", lockfile_path.display()))?;
+
+ Ok((metadata, lockfile))
+}
diff --git a/crate_universe/src/metadata/dependency.rs b/crate_universe/src/metadata/dependency.rs
new file mode 100644
index 0000000..105e4fe
--- /dev/null
+++ b/crate_universe/src/metadata/dependency.rs
@@ -0,0 +1,363 @@
+//! Gathering dependencies is the largest part of annotating.
+use cargo_metadata::{Metadata as CargoMetadata, Node, NodeDep, Package, PackageId};
+use serde::{Deserialize, Serialize};
+
+use crate::utils::sanitize_module_name;
+use crate::utils::starlark::{Select, SelectList};
+
+/// A representation of a crate dependency
+#[derive(Debug, Serialize, Deserialize, PartialEq, Eq, PartialOrd, Ord, Clone)]
+pub struct Dependency {
+    /// The PackageId of the target
+    pub package_id: PackageId,
+
+    /// The library target name of the dependency.
+    pub target_name: String,
+
+    /// The alias for the dependency from the perspective of the current package.
+    /// `None` when the dependency is not renamed by the dependent.
+    pub alias: Option<String>,
+}
+
+/// A collection of [Dependency]s sorted by dependency kind.
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct DependencySet {
+    /// Library dependencies used by ordinary (non-dev) targets.
+    pub normal_deps: SelectList<Dependency>,
+    /// Library dependencies used only by dev targets (tests, benches, examples).
+    pub normal_dev_deps: SelectList<Dependency>,
+    /// Proc-macro dependencies of ordinary (non-dev) targets.
+    pub proc_macro_deps: SelectList<Dependency>,
+    /// Proc-macro dependencies used only by dev targets.
+    pub proc_macro_dev_deps: SelectList<Dependency>,
+    /// Dependencies of the crate's build script (plus paired `*-sys` crates).
+    pub build_deps: SelectList<Dependency>,
+    /// Proc-macro dependencies of the crate's build script.
+    pub build_proc_macro_deps: SelectList<Dependency>,
+}
+
+impl DependencySet {
+    /// Collect all dependencies for a given node in the resolve graph.
+    pub fn new_for_node(node: &Node, metadata: &CargoMetadata) -> Self {
+        // Partition library dependencies into dev-only vs normal.
+        let (normal_dev_deps, normal_deps) = {
+            let (dev, normal) = node
+                .deps
+                .iter()
+                // Do not track workspace members as dependencies. Users are expected to maintain those connections
+                .filter(|dep| !is_workspace_member(dep, metadata))
+                .filter(|dep| is_lib_package(&metadata[&dep.pkg]))
+                .filter(|dep| is_normal_dependency(dep) || is_dev_dependency(dep))
+                .partition(|dep| is_dev_dependency(dep));
+
+            (
+                collect_deps_selectable(dev, metadata),
+                collect_deps_selectable(normal, metadata),
+            )
+        };
+
+        // Partition proc-macro dependencies into dev-only vs normal.
+        let (proc_macro_dev_deps, proc_macro_deps) = {
+            let (dev, normal) = node
+                .deps
+                .iter()
+                // Do not track workspace members as dependencies. Users are expected to maintain those connections
+                .filter(|dep| !is_workspace_member(dep, metadata))
+                .filter(|dep| is_proc_macro_package(&metadata[&dep.pkg]))
+                .filter(|dep| !is_build_dependency(dep))
+                .partition(|dep| is_dev_dependency(dep));
+
+            (
+                collect_deps_selectable(dev, metadata),
+                collect_deps_selectable(normal, metadata),
+            )
+        };
+
+        // Partition build-script dependencies into proc-macro vs ordinary.
+        let (build_proc_macro_deps, mut build_deps) = {
+            let (proc_macro, normal) = node
+                .deps
+                .iter()
+                // Do not track workspace members as dependencies. Users are expected to maintain those connections
+                .filter(|dep| !is_workspace_member(dep, metadata))
+                .filter(|dep| is_build_dependency(dep))
+                .filter(|dep| !is_dev_dependency(dep))
+                .partition(|dep| is_proc_macro_package(&metadata[&dep.pkg]));
+
+            (
+                collect_deps_selectable(proc_macro, metadata),
+                collect_deps_selectable(normal, metadata),
+            )
+        };
+
+        // `*-sys` packages follow slightly different rules than other dependencies. These
+        // packages seem to provide some environment variables required to build the top level
+        // package and are expected to be available to other build scripts. If a target depends
+        // on a `*-sys` crate for itself, so would its build script. Hopefully this is correct.
+        // https://doc.rust-lang.org/cargo/reference/build-scripts.html#the-links-manifest-key
+        // https://doc.rust-lang.org/cargo/reference/build-scripts.html#-sys-packages
+        let sys_name = format!("{}-sys", &metadata[&node.id].name);
+        normal_deps.configurations().into_iter().for_each(|config| {
+            normal_deps
+                .get_iter(config)
+                // Iterating over known key should be safe
+                .unwrap()
+                // Add any normal dependency to build dependencies that are associated `*-sys` crates
+                .for_each(|dep| {
+                    let dep_pkg_name = &metadata[&dep.package_id].name;
+                    if *dep_pkg_name == sys_name {
+                        build_deps.insert(dep.clone(), config.cloned())
+                    }
+                });
+        });
+
+        Self {
+            normal_deps,
+            normal_dev_deps,
+            proc_macro_deps,
+            proc_macro_dev_deps,
+            build_deps,
+            build_proc_macro_deps,
+        }
+    }
+}
+
+/// Convert a list of resolve-graph dependencies into a [SelectList], keyed by
+/// the optional target platform each dependency kind applies to.
+fn collect_deps_selectable(
+    deps: Vec<&NodeDep>,
+    metadata: &cargo_metadata::Metadata,
+) -> SelectList<Dependency> {
+    let mut select_list = SelectList::default();
+
+    for node_dep in deps {
+        let dep_pkg = &metadata[&node_dep.pkg];
+        let lib_name = get_library_target_name(dep_pkg, &node_dep.name);
+        let alias_name = get_target_alias(&node_dep.name, dep_pkg);
+
+        // A dependency may apply under several kinds/platforms; record an
+        // entry for each one.
+        for kind_info in node_dep.dep_kinds.iter() {
+            let configuration = kind_info
+                .target
+                .as_ref()
+                .map(|platform| platform.to_string());
+            select_list.insert(
+                Dependency {
+                    package_id: node_dep.pkg.clone(),
+                    target_name: lib_name.clone(),
+                    alias: alias_name.clone(),
+                },
+                configuration,
+            );
+        }
+    }
+
+    select_list
+}
+
+/// Whether `package` exposes at least one library-like (`lib`/`rlib`) crate type.
+fn is_lib_package(package: &Package) -> bool {
+    package
+        .targets
+        .iter()
+        .flat_map(|target| target.crate_types.iter())
+        .any(|crate_type| crate_type == "lib" || crate_type == "rlib")
+}
+
+/// Whether `package` exposes a `proc-macro` crate type on any of its targets.
+fn is_proc_macro_package(package: &Package) -> bool {
+    package.targets.iter().any(|target| {
+        target
+            .crate_types
+            .iter()
+            .any(|crate_type| crate_type.as_str() == "proc-macro")
+    })
+}
+
+/// A dependency counts as "dev" only when it is marked as a development
+/// dependency and is *not* also a normal dependency of the same package.
+fn is_dev_dependency(node_dep: &NodeDep) -> bool {
+    // In the event that a dependency is listed as both a dev and normal
+    // dependency, the normal usage wins.
+    if is_normal_dependency(node_dep) {
+        return false;
+    }
+
+    node_dep
+        .dep_kinds
+        .iter()
+        .any(|kind_info| matches!(kind_info.kind, cargo_metadata::DependencyKind::Development))
+}
+
+/// Whether any of the dependency's kinds is a build-script dependency.
+fn is_build_dependency(node_dep: &NodeDep) -> bool {
+    node_dep
+        .dep_kinds
+        .iter()
+        .any(|kind_info| matches!(kind_info.kind, cargo_metadata::DependencyKind::Build))
+}
+
+/// Whether any of the dependency's kinds is a plain (normal) dependency.
+fn is_normal_dependency(node_dep: &NodeDep) -> bool {
+    node_dep
+        .dep_kinds
+        .iter()
+        .any(|kind_info| matches!(kind_info.kind, cargo_metadata::DependencyKind::Normal))
+}
+
+/// Whether the dependency's package is a member of the current Cargo workspace.
+fn is_workspace_member(node_dep: &NodeDep, metadata: &CargoMetadata) -> bool {
+    metadata.workspace_members.contains(&node_dep.pkg)
+}
+
+/// Resolve the library target name of `package`.
+///
+/// `potential_name` is the name a dependent used in the resolve graph; when it
+/// matches one of the package's own targets it is not an alias and is returned
+/// directly. Otherwise the package's single library target provides the name.
+///
+/// Panics (with context) if the package does not have exactly one library target.
+fn get_library_target_name(package: &Package, potential_name: &str) -> String {
+    // If the potential name is not an alias in a dependent's package, a target's name
+    // should match which means we already know what the target library name is.
+    if package.targets.iter().any(|t| t.name == potential_name) {
+        return potential_name.to_string();
+    }
+
+    // Locate any library type targets
+    let lib_targets: Vec<&cargo_metadata::Target> = package
+        .targets
+        .iter()
+        .filter(|t| t.kind.iter().any(|k| k == "lib" || k == "proc-macro"))
+        .collect();
+
+    // Only one target should be found. Include the package name in the panic
+    // message so a failure is actionable.
+    assert_eq!(
+        lib_targets.len(),
+        1,
+        "Unexpected number of library targets for package '{}'",
+        package.name,
+    );
+
+    lib_targets[0].name.clone()
+}
+
+/// The resolve graph (resolve.nodes[#].deps[#].name) of Cargo metadata uses module names
+/// for targets where packages (packages[#].targets[#].name) uses crate names. In order to
+/// determine whether or not a dependency is aliased, we compare it with all available targets
+/// on its package. Note that target names are not guaranteed to be module names where Node
+/// dependencies are, so we need to do a conversion to check for this
+fn get_target_alias(target_name: &str, package: &Package) -> Option<String> {
+    // `target_name` is an alias only when no target on the package sanitizes
+    // to the same module name.
+    package
+        .targets
+        .iter()
+        .all(|t| sanitize_module_name(&t.name) != target_name)
+        .then(|| target_name.to_string())
+}
+
+#[cfg(test)]
+mod test {
+    use std::collections::BTreeSet;
+
+    use super::*;
+
+    use crate::test::*;
+
+    /// Locate the [cargo_metadata::Node] for the crate matching the given name
+    fn find_metadata_node<'a>(
+        name: &str,
+        metadata: &'a cargo_metadata::Metadata,
+    ) -> &'a cargo_metadata::Node {
+        metadata
+            .resolve
+            .as_ref()
+            .unwrap()
+            .nodes
+            .iter()
+            .find(|node| {
+                let pkg = &metadata[&node.id];
+                pkg.name == name
+            })
+            .unwrap()
+    }
+
+    #[test]
+    fn sys_dependencies() {
+        let metadata = metadata::build_scripts();
+
+        let openssl_node = find_metadata_node("openssl", &metadata);
+
+        let dependencies = DependencySet::new_for_node(openssl_node, &metadata);
+
+        let sys_crate = dependencies
+            .normal_deps
+            .get_iter(None)
+            .unwrap()
+            .find(|dep| {
+                let pkg = &metadata[&dep.package_id];
+                pkg.name == "openssl-sys"
+            });
+
+        // sys crates like `openssl-sys` should always be dependencies of any
+        // crate which matches its name minus the `-sys` suffix
+        assert!(sys_crate.is_some());
+    }
+
+    #[test]
+    fn tracked_aliases() {
+        let metadata = metadata::alias();
+
+        let aliases_node = find_metadata_node("aliases", &metadata);
+        let dependencies = DependencySet::new_for_node(aliases_node, &metadata);
+
+        // Only renamed dependencies should carry an alias.
+        let aliases: Vec<&Dependency> = dependencies
+            .normal_deps
+            .get_iter(None)
+            .unwrap()
+            .filter(|dep| dep.alias.is_some())
+            .collect();
+
+        assert_eq!(aliases.len(), 2);
+
+        let expected: BTreeSet<String> = aliases
+            .into_iter()
+            .map(|dep| dep.alias.as_ref().unwrap().clone())
+            .collect();
+
+        assert_eq!(
+            expected,
+            BTreeSet::from(["pinned_log".to_owned(), "pinned_names".to_owned()])
+        );
+    }
+
+    #[test]
+    fn matched_rlib() {
+        let metadata = metadata::crate_types();
+
+        let node = find_metadata_node("crate-types", &metadata);
+        let dependencies = DependencySet::new_for_node(node, &metadata);
+
+        let rlib_deps: Vec<&Dependency> = dependencies
+            .normal_deps
+            .get_iter(None)
+            .unwrap()
+            .filter(|dep| {
+                let pkg = &metadata[&dep.package_id];
+                pkg.targets
+                    .iter()
+                    .any(|t| t.crate_types.contains(&"rlib".to_owned()))
+            })
+            .collect();
+
+        // Currently the only expected __explicitly__ "rlib" target in this metadata is `sysinfo`.
+        assert_eq!(rlib_deps.len(), 1);
+
+        let sysinfo_dep = rlib_deps.iter().last().unwrap();
+        assert_eq!(sysinfo_dep.target_name, "sysinfo");
+    }
+
+    #[test]
+    fn multiple_dep_kinds() {
+        let metadata = metadata::multi_cfg_dep();
+
+        let node = find_metadata_node("cpufeatures", &metadata);
+        let dependencies = DependencySet::new_for_node(node, &metadata);
+
+        // Collect every configuration under which `libc` appears as a normal dependency.
+        let libc_cfgs: Vec<Option<String>> = dependencies
+            .normal_deps
+            .configurations()
+            .into_iter()
+            .flat_map(|conf| {
+                dependencies
+                    .normal_deps
+                    .get_iter(conf)
+                    .expect("Iterating over known keys should never panic")
+                    .filter(|dep| dep.target_name == "libc")
+                    .map(move |_| conf.cloned())
+            })
+            .collect();
+
+        assert_eq!(libc_cfgs.len(), 2);
+
+        let cfg_strs: BTreeSet<String> = libc_cfgs.into_iter().flatten().collect();
+        assert_eq!(
+            cfg_strs,
+            BTreeSet::from([
+                "aarch64-apple-darwin".to_owned(),
+                "cfg(all(target_arch = \"aarch64\", target_os = \"linux\"))".to_owned(),
+            ])
+        );
+    }
+}
diff --git a/crate_universe/src/metadata/metadata_annotation.rs b/crate_universe/src/metadata/metadata_annotation.rs
new file mode 100644
index 0000000..be4cb7f
--- /dev/null
+++ b/crate_universe/src/metadata/metadata_annotation.rs
@@ -0,0 +1,555 @@
+//! Collect and store information from Cargo metadata specific to Bazel's needs
+
+use std::collections::{BTreeMap, BTreeSet};
+use std::convert::TryFrom;
+use std::path::PathBuf;
+
+use anyhow::{bail, Result};
+use cargo_metadata::{Node, Package, PackageId};
+use hex::ToHex;
+use serde::{Deserialize, Serialize};
+
+use crate::config::{Commitish, Config, CrateAnnotations, CrateId};
+use crate::metadata::dependency::DependencySet;
+use crate::splicing::{SourceInfo, WorkspaceMetadata};
+
+/// Alias for deserialized `cargo metadata` output.
+pub type CargoMetadata = cargo_metadata::Metadata;
+/// Alias for a parsed `Cargo.lock` file.
+pub type CargoLockfile = cargo_lock::Lockfile;
+
+/// Additional information about a crate relative to other crates in a dependency graph.
+#[derive(Debug, Serialize, Deserialize)]
+pub struct CrateAnnotation {
+    /// The crate's node in the Cargo "resolve" graph.
+    pub node: Node,
+
+    /// The crate's dependencies, sorted by dependency kind (see [DependencySet]).
+    pub deps: DependencySet,
+}
+
+/// Additional information about a Cargo workspace's metadata.
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct MetadataAnnotation {
+    /// All packages found within the Cargo metadata, keyed by id.
+    pub packages: BTreeMap<PackageId, Package>,
+
+    /// All [CrateAnnotation]s for all packages in the resolve graph.
+    pub crates: BTreeMap<PackageId, CrateAnnotation>,
+
+    /// All packages that are workspace members
+    pub workspace_members: BTreeSet<PackageId>,
+
+    /// The path to the directory containing the Cargo workspace that produced the metadata.
+    pub workspace_root: PathBuf,
+
+    /// Information on the Cargo workspace.
+    pub workspace_metadata: WorkspaceMetadata,
+}
+
+impl MetadataAnnotation {
+    /// Construct a [MetadataAnnotation] from raw Cargo metadata, indexing
+    /// packages, resolve-graph nodes, and workspace membership by [PackageId].
+    pub fn new(metadata: CargoMetadata) -> MetadataAnnotation {
+        // UNWRAP: The workspace metadata should be written by a controlled process. This should not return a result
+        let workspace_metadata = find_workspace_metadata(&metadata).unwrap_or_default();
+
+        let resolve = metadata
+            .resolve
+            .as_ref()
+            .expect("The metadata provided requires a resolve graph")
+            .clone();
+
+        let is_node_workspace_member = |node: &Node, metadata: &CargoMetadata| -> bool {
+            metadata.workspace_members.iter().any(|pkg| pkg == &node.id)
+        };
+
+        let workspace_members: BTreeSet<PackageId> = resolve
+            .nodes
+            .iter()
+            .filter(|node| is_node_workspace_member(node, &metadata))
+            .map(|node| node.id.clone())
+            .collect();
+
+        // Annotate every node in the resolve graph with its dependency set.
+        let crates = resolve
+            .nodes
+            .iter()
+            .map(|node| {
+                (
+                    node.id.clone(),
+                    Self::annotate_crate(node.clone(), &metadata),
+                )
+            })
+            .collect();
+
+        // Index all packages by their id.
+        let packages = metadata
+            .packages
+            .into_iter()
+            .map(|pkg| (pkg.id.clone(), pkg))
+            .collect();
+
+        MetadataAnnotation {
+            packages,
+            crates,
+            workspace_members,
+            workspace_root: PathBuf::from(metadata.workspace_root.as_std_path()),
+            workspace_metadata,
+        }
+    }
+
+    /// Pair a resolve-graph node with its sorted [DependencySet].
+    fn annotate_crate(node: Node, metadata: &CargoMetadata) -> CrateAnnotation {
+        // Gather all dependencies
+        let deps = DependencySet::new_for_node(&node, metadata);
+
+        CrateAnnotation { node, deps }
+    }
+}
+
+/// Additional information about how and where to acquire a crate's source code from.
+#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Serialize, Deserialize)]
+pub enum SourceAnnotation {
+    /// A crate fetched from a Git repository (rendered as a Bazel `git_repository`).
+    Git {
+        /// The Git url where to clone the source from.
+        remote: String,
+
+        /// The revision information for the git repository. This is used for
+        /// [git_repository::commit](https://docs.bazel.build/versions/main/repo/git.html#git_repository-commit),
+        /// [git_repository::tag](https://docs.bazel.build/versions/main/repo/git.html#git_repository-tag), or
+        /// [git_repository::branch](https://docs.bazel.build/versions/main/repo/git.html#git_repository-branch).
+        commitish: Commitish,
+
+        /// See [git_repository::shallow_since](https://docs.bazel.build/versions/main/repo/git.html#git_repository-shallow_since)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        shallow_since: Option<String>,
+
+        /// See [git_repository::strip_prefix](https://docs.bazel.build/versions/main/repo/git.html#git_repository-strip_prefix)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        strip_prefix: Option<String>,
+
+        /// See [git_repository::patch_args](https://docs.bazel.build/versions/main/repo/git.html#git_repository-patch_args)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        patch_args: Option<Vec<String>>,
+
+        /// See [git_repository::patch_tool](https://docs.bazel.build/versions/main/repo/git.html#git_repository-patch_tool)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        patch_tool: Option<String>,
+
+        /// See [git_repository::patches](https://docs.bazel.build/versions/main/repo/git.html#git_repository-patches)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        patches: Option<BTreeSet<String>>,
+    },
+    /// A crate fetched as an archive over HTTP (rendered as a Bazel `http_archive`).
+    Http {
+        /// See [http_archive::url](https://docs.bazel.build/versions/main/repo/http.html#http_archive-url)
+        url: String,
+
+        /// See [http_archive::sha256](https://docs.bazel.build/versions/main/repo/http.html#http_archive-sha256)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        sha256: Option<String>,
+
+        /// See [http_archive::patch_args](https://docs.bazel.build/versions/main/repo/http.html#http_archive-patch_args)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        patch_args: Option<Vec<String>>,
+
+        /// See [http_archive::patch_tool](https://docs.bazel.build/versions/main/repo/http.html#http_archive-patch_tool)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        patch_tool: Option<String>,
+
+        /// See [http_archive::patches](https://docs.bazel.build/versions/main/repo/http.html#http_archive-patches)
+        #[serde(default, skip_serializing_if = "Option::is_none")]
+        patches: Option<BTreeSet<String>>,
+    },
+}
+
+/// A mapping of crates to the source information used to acquire them.
+#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)]
+pub struct LockfileAnnotation {
+    /// The [SourceAnnotation] for each non-workspace-member package in the resolve graph.
+    pub crates: BTreeMap<PackageId, SourceAnnotation>,
+}
+
+impl LockfileAnnotation {
+    /// Build a [SourceAnnotation] for every non-workspace-member crate in the
+    /// resolve graph of `metadata`, using `lockfile` for checksums and sources.
+    pub fn new(lockfile: CargoLockfile, metadata: &CargoMetadata) -> Result<Self> {
+        let workspace_metadata = find_workspace_metadata(metadata).unwrap_or_default();
+
+        let nodes: Vec<&Node> = metadata
+            .resolve
+            .as_ref()
+            .expect("Metadata is expected to have a resolve graph")
+            .nodes
+            .iter()
+            .filter(|node| !is_workspace_member(&node.id, metadata))
+            .collect();
+
+        // Produce source annotations for each crate in the resolve graph
+        let crates = nodes
+            .iter()
+            .map(|node| {
+                Ok((
+                    node.id.clone(),
+                    Self::collect_source_annotations(
+                        node,
+                        metadata,
+                        &lockfile,
+                        &workspace_metadata,
+                    )?,
+                ))
+            })
+            .collect::<Result<BTreeMap<PackageId, SourceAnnotation>>>()?;
+
+        Ok(Self { crates })
+    }
+
+    /// Resolve all URLs and checksum-like data for each package
+    fn collect_source_annotations(
+        node: &Node,
+        metadata: &CargoMetadata,
+        lockfile: &CargoLockfile,
+        workspace_metadata: &WorkspaceMetadata,
+    ) -> Result<SourceAnnotation> {
+        let pkg = &metadata[&node.id];
+
+        // Locate the matching lock package for the current crate
+        let lock_pkg = match cargo_meta_pkg_to_locked_pkg(pkg, &lockfile.packages) {
+            Some(lock_pkg) => lock_pkg,
+            None => bail!(
+                "Could not find lockfile entry matching metadata package '{}'",
+                pkg.name
+            ),
+        };
+
+        // Check for spliced information about a crate's network source.
+        let spliced_source_info = Self::find_source_annotation(lock_pkg, workspace_metadata);
+
+        // Parse its source info. The check above should prevent a panic
+        let source = match lock_pkg.source.as_ref() {
+            Some(source) => source,
+            // Without raw source info fall back to the spliced source info.
+            None => match spliced_source_info {
+                Some(info) => {
+                    return Ok(SourceAnnotation::Http {
+                        url: info.url,
+                        sha256: Some(info.sha256),
+                        patch_args: None,
+                        patch_tool: None,
+                        patches: None,
+                    })
+                }
+                None => bail!(
+                    "The package '{:?} {:?}' has no source info so no annotation can be made",
+                    lock_pkg.name,
+                    lock_pkg.version
+                ),
+            },
+        };
+
+        // Handle any git repositories
+        if let Some(git_ref) = source.git_reference() {
+            let strip_prefix = Self::extract_git_strip_prefix(pkg)?;
+
+            return Ok(SourceAnnotation::Git {
+                remote: source.url().to_string(),
+                commitish: Commitish::from(git_ref.clone()),
+                shallow_since: None,
+                strip_prefix,
+                patch_args: None,
+                patch_tool: None,
+                patches: None,
+            });
+        }
+
+        // One of the last things that should be checked is the spliced source information as
+        // other sources may more accurately represent where a crate should be downloaded.
+        if let Some(info) = spliced_source_info {
+            return Ok(SourceAnnotation::Http {
+                url: info.url,
+                sha256: Some(info.sha256),
+                patch_args: None,
+                patch_tool: None,
+                patches: None,
+            });
+        }
+
+        // Finally, In the event that no spliced source information was included in the
+        // metadata the raw source info is used for registry crates and `crates.io` is
+        // assumed to be the source.
+        if source.is_registry() {
+            return Ok(SourceAnnotation::Http {
+                url: format!(
+                    "https://crates.io/api/v1/crates/{}/{}/download",
+                    lock_pkg.name, lock_pkg.version,
+                ),
+                sha256: lock_pkg
+                    .checksum
+                    .as_ref()
+                    .and_then(|sum| {
+                        if sum.is_sha256() {
+                            sum.as_sha256()
+                        } else {
+                            None
+                        }
+                    })
+                    .map(|sum| sum.encode_hex::<String>()),
+                patch_args: None,
+                patch_tool: None,
+                patches: None,
+            });
+        }
+
+        bail!(
+            "Unable to determine source annotation for '{:?} {:?}",
+            lock_pkg.name,
+            lock_pkg.version
+        )
+    }
+
+    /// Look up spliced [SourceInfo] for `package` in the workspace metadata, if any.
+    fn find_source_annotation(
+        package: &cargo_lock::Package,
+        metadata: &WorkspaceMetadata,
+    ) -> Option<SourceInfo> {
+        let crate_id = CrateId::new(package.name.to_string(), package.version.to_string());
+        metadata.sources.get(&crate_id).cloned()
+    }
+
+    /// Derive a `strip_prefix` for a git-sourced package from its manifest path.
+    ///
+    /// Expected layout:
+    /// {CARGO_HOME}/git/checkouts/name-hash/short-sha/[strip_prefix...]/Cargo.toml
+    fn extract_git_strip_prefix(pkg: &Package) -> Result<Option<String>> {
+        let components = pkg
+            .manifest_path
+            .components()
+            .map(|v| v.to_string())
+            .collect::<Vec<_>>();
+        for (i, _) in components.iter().enumerate() {
+            let possible_components = &components[i..];
+            // A match needs at least `git/checkouts/<name>/<sha>/Cargo.toml`.
+            if possible_components.len() < 5 {
+                continue;
+            }
+            if possible_components[0] != "git"
+                || possible_components[1] != "checkouts"
+                || possible_components[possible_components.len() - 1] != "Cargo.toml"
+            {
+                continue;
+            }
+            // The manifest sits directly in the checkout root; nothing to strip.
+            if possible_components.len() == 5 {
+                return Ok(None);
+            }
+            return Ok(Some(
+                possible_components[4..(possible_components.len() - 1)].join("/"),
+            ));
+        }
+        bail!("Expected git package to have a manifest path of pattern {{CARGO_HOME}}/git/checkouts/[name]-[hash]/[short-sha]/.../Cargo.toml but {:?} had manifest path {}", pkg.id, pkg.manifest_path);
+    }
+}
+
+/// A pairing of a crate's package identifier to its annotations.
+#[derive(Debug, Serialize, Deserialize)]
+pub struct PairredExtras {
+    /// The crate's package identifier
+    pub package_id: cargo_metadata::PackageId,
+
+    /// The crate's annotations
+    pub crate_extra: CrateAnnotations,
+}
+
+/// A collection of data which has been processed for optimal use in generating Bazel targets.
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct Annotations {
+    /// Annotated Cargo metadata
+    pub metadata: MetadataAnnotation,
+
+    /// Annotated Cargo lockfile
+    pub lockfile: LockfileAnnotation,
+
+    /// The current workspace's configuration settings
+    pub config: Config,
+
+    /// Paired crate annotations, keyed by the crate they apply to.
+    pub pairred_extras: BTreeMap<CrateId, PairredExtras>,
+}
+
+impl Annotations {
+    /// Combine Cargo metadata, the Cargo lockfile, and the workspace [Config]
+    /// into one annotated view, pairing user-provided crate annotations with
+    /// the packages they match.
+    ///
+    /// Returns an error if any configured annotation matches no package.
+    pub fn new(
+        cargo_metadata: CargoMetadata,
+        cargo_lockfile: CargoLockfile,
+        config: Config,
+    ) -> Result<Self> {
+        let lockfile_annotation = LockfileAnnotation::new(cargo_lockfile, &cargo_metadata)?;
+
+        // Annotate the cargo metadata
+        let metadata_annotation = MetadataAnnotation::new(cargo_metadata);
+
+        let mut unused_extra_annotations = config.annotations.clone();
+
+        // Ensure each override matches a particular package
+        let pairred_extras = metadata_annotation
+            .packages
+            .iter()
+            .filter_map(|(pkg_id, pkg)| {
+                let extras: Vec<CrateAnnotations> = config
+                    .annotations
+                    .iter()
+                    .filter(|(id, _)| id.matches(pkg))
+                    .map(|(id, extra)| {
+                        // Mark that an annotation has been consumed
+                        unused_extra_annotations.remove(id);
+
+                        // Return the matched annotation
+                        extra
+                    })
+                    .cloned()
+                    .collect();
+
+                if !extras.is_empty() {
+                    Some((
+                        CrateId::new(pkg.name.clone(), pkg.version.to_string()),
+                        PairredExtras {
+                            package_id: pkg_id.clone(),
+                            // Multiple matching annotations are combined via `sum`.
+                            crate_extra: extras.into_iter().sum(),
+                        },
+                    ))
+                } else {
+                    None
+                }
+            })
+            .collect();
+
+        // Alert on any unused annotations
+        if !unused_extra_annotations.is_empty() {
+            bail!(
+                "Unused annotations were provided. Please remove them: {:?}",
+                unused_extra_annotations.keys()
+            );
+        }
+
+        // Annotate metadata
+        Ok(Annotations {
+            metadata: metadata_annotation,
+            lockfile: lockfile_annotation,
+            config,
+            pairred_extras,
+        })
+    }
+}
+
+/// Deserialize [WorkspaceMetadata] from the metadata's `workspace_metadata`
+/// blob, returning `None` when it is absent or fails to parse.
+fn find_workspace_metadata(cargo_metadata: &CargoMetadata) -> Option<WorkspaceMetadata> {
+    WorkspaceMetadata::try_from(cargo_metadata.workspace_metadata.clone()).ok()
+}
+
+/// Determines whether or not a package is a workspace member. This follows
+/// the Cargo definition of a workspace member with one exception where
+/// "extra workspace members" are *not* treated as workspace members
+fn is_workspace_member(id: &PackageId, cargo_metadata: &CargoMetadata) -> bool {
+    if !cargo_metadata.workspace_members.contains(id) {
+        return false;
+    }
+
+    // "Extra workspace members" are recorded in the workspace metadata's
+    // source map; any member found there is excluded.
+    match find_workspace_metadata(cargo_metadata) {
+        Some(data) => {
+            let pkg = &cargo_metadata[id];
+            let crate_id = CrateId::new(pkg.name.clone(), pkg.version.to_string());
+            !data.sources.contains_key(&crate_id)
+        }
+        None => true,
+    }
+}
+
+/// Match a [cargo_metadata::Package] to a [cargo_lock::Package].
+///
+/// A match requires both the name and the version to agree.
+fn cargo_meta_pkg_to_locked_pkg<'a>(
+    pkg: &Package,
+    lock_packages: &'a [cargo_lock::Package],
+) -> Option<&'a cargo_lock::Package> {
+    lock_packages.iter().find(|lock_pkg| {
+        let name_matches = lock_pkg.name.as_str() == pkg.name;
+        let version_matches = lock_pkg.version == pkg.version;
+        name_matches && version_matches
+    })
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::test::*;
+
+    #[test]
+    fn test_cargo_meta_pkg_to_locked_pkg() {
+        let pkg = mock_cargo_metadata_package();
+        let lock_pkg = mock_cargo_lock_package();
+
+        assert!(cargo_meta_pkg_to_locked_pkg(&pkg, &vec![lock_pkg]).is_some())
+    }
+
+    #[test]
+    fn annotate_metadata_with_aliases() {
+        let annotations = MetadataAnnotation::new(test::metadata::alias());
+        // The alias fixture resolves two distinct versions of the `log` crate.
+        let log_crates: BTreeMap<&PackageId, &CrateAnnotation> = annotations
+            .crates
+            .iter()
+            .filter(|(id, _)| {
+                let pkg = &annotations.packages[*id];
+                pkg.name == "log"
+            })
+            .collect();
+
+        assert_eq!(log_crates.len(), 2);
+    }
+
+    #[test]
+    fn annotate_lockfile_with_aliases() {
+        LockfileAnnotation::new(test::lockfile::alias(), &test::metadata::alias()).unwrap();
+    }
+
+    #[test]
+    fn annotate_metadata_with_build_scripts() {
+        MetadataAnnotation::new(test::metadata::build_scripts());
+    }
+
+    #[test]
+    fn annotate_lockfile_with_build_scripts() {
+        LockfileAnnotation::new(
+            test::lockfile::build_scripts(),
+            &test::metadata::build_scripts(),
+        )
+        .unwrap();
+    }
+
+    #[test]
+    fn annotate_metadata_with_no_deps() {}
+
+    #[test]
+    fn annotate_lockfile_with_no_deps() {
+        LockfileAnnotation::new(test::lockfile::no_deps(), &test::metadata::no_deps()).unwrap();
+    }
+
+    #[test]
+    fn detects_strip_prefix_for_git_repo() {
+        let crates =
+            LockfileAnnotation::new(test::lockfile::git_repos(), &test::metadata::git_repos())
+                .unwrap()
+                .crates;
+        let tracing_core = crates
+            .iter()
+            .find(|(k, _)| k.repr.starts_with("tracing-core "))
+            .map(|(_, v)| v)
+            .unwrap();
+        match tracing_core {
+            SourceAnnotation::Git {
+                strip_prefix: Some(strip_prefix),
+                ..
+            } if strip_prefix == "tracing-core" => {
+                // Matched correctly.
+            }
+            other => {
+                panic!("Wanted SourceAnnotation::Git with strip_prefix == Some(\"tracing-core\"), got: {:?}", other);
+            }
+        }
+    }
+
+    #[test]
+    fn detect_unused_annotation() {
+        // Create a config with some random annotation
+        let mut config = Config::default();
+        config.annotations.insert(
+            CrateId::new("mock-crate".to_owned(), "0.1.0".to_owned()),
+            CrateAnnotations::default(),
+        );
+
+        let result = Annotations::new(test::metadata::no_deps(), test::lockfile::no_deps(), config);
+        assert!(result.is_err());
+
+        let result_str = format!("{:?}", result);
+        assert!(result_str.contains("Unused annotations were provided. Please remove them"));
+        assert!(result_str.contains("mock-crate"));
+    }
+}
diff --git a/crate_universe/src/rendering.rs b/crate_universe/src/rendering.rs
new file mode 100644
index 0000000..a0570ba
--- /dev/null
+++ b/crate_universe/src/rendering.rs
@@ -0,0 +1,470 @@
+//! Tools for rendering and writing BUILD and other Starlark files
+
+mod template_engine;
+
+use std::collections::BTreeMap;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+use anyhow::{bail, Context as AnyhowContext, Result};
+
+use crate::config::RenderConfig;
+use crate::context::Context;
+use crate::rendering::template_engine::TemplateEngine;
+use crate::splicing::default_splicing_package_crate_id;
+use crate::utils::starlark::Label;
+
+/// Renders a [Context][crate::context::Context] into BUILD files and
+/// Starlark modules using the configured templates.
+pub struct Renderer {
+    // Controls output paths, label templates, and vendoring behavior.
+    config: RenderConfig,
+    // Tera-backed engine holding the embedded templates.
+    engine: TemplateEngine,
+}
+
+impl Renderer {
+    /// Create a renderer whose template engine is seeded from `config`.
+    pub fn new(config: RenderConfig) -> Self {
+        let engine = TemplateEngine::new(&config);
+        Self { config, engine }
+    }
+
+    /// Render every output file (per-crate BUILD files, the crates module,
+    /// and — in remote vendor mode — vendor support files), keyed by the
+    /// relative path each file should be written to.
+    pub fn render(&self, context: &Context) -> Result<BTreeMap<PathBuf, String>> {
+        let mut output = BTreeMap::new();
+
+        output.extend(self.render_build_files(context)?);
+        output.extend(self.render_crates_module(context)?);
+
+        if let Some(vendor_mode) = &self.config.vendor_mode {
+            match vendor_mode {
+                crate::config::VendorMode::Local => {
+                    // Nothing to do for local vendor crate
+                }
+                crate::config::VendorMode::Remote => {
+                    output.extend(self.render_vendor_support_files(context)?);
+                }
+            }
+        }
+
+        Ok(output)
+    }
+
+    /// Render `defs.bzl` and its accompanying `BUILD.bazel` file.
+    fn render_crates_module(&self, context: &Context) -> Result<BTreeMap<PathBuf, String>> {
+        let module_label = render_module_label(&self.config.crates_module_template, "defs.bzl")
+            .context("Failed to resolve string to module file label")?;
+        let module_build_label =
+            render_module_label(&self.config.crates_module_template, "BUILD.bazel")
+                .context("Failed to resolve string to module file label")?;
+
+        let mut map = BTreeMap::new();
+        map.insert(
+            Renderer::label_to_path(&module_label),
+            self.engine.render_module_bzl(context)?,
+        );
+        map.insert(
+            Renderer::label_to_path(&module_build_label),
+            self.engine.render_module_build_file(context)?,
+        );
+
+        Ok(map)
+    }
+
+    /// Render one BUILD file per crate, skipping the splicing placeholder
+    /// package and any local workspace members.
+    fn render_build_files(&self, context: &Context) -> Result<BTreeMap<PathBuf, String>> {
+        let default_splicing_package_id = default_splicing_package_crate_id();
+        self.engine
+            .render_crate_build_files(context)?
+            .into_iter()
+            // Do not render the default splicing package
+            .filter(|(id, _)| *id != &default_splicing_package_id)
+            // Do not render local packages
+            .filter(|(id, _)| !context.workspace_members.contains_key(id))
+            .map(|(id, content)| {
+                let ctx = &context.crates[id];
+                // `?` propagates the label-parse failure directly; this is
+                // what the previous `match`/`bail!(e)` spelled out verbosely.
+                let label = render_build_file_template(
+                    &self.config.build_file_template,
+                    &ctx.name,
+                    &ctx.version,
+                )?;
+
+                Ok((Renderer::label_to_path(&label), content))
+            })
+            .collect()
+    }
+
+    /// Render `crates.bzl` for remote vendor mode.
+    fn render_vendor_support_files(&self, context: &Context) -> Result<BTreeMap<PathBuf, String>> {
+        let module_label = render_module_label(&self.config.crates_module_template, "crates.bzl")
+            .context("Failed to resolve string to module file label")?;
+
+        let mut map = BTreeMap::new();
+        map.insert(
+            Renderer::label_to_path(&module_label),
+            self.engine.render_vendor_module_file(context)?,
+        );
+
+        Ok(map)
+    }
+
+    /// Convert a label into the workspace-relative path of its file.
+    fn label_to_path(label: &Label) -> PathBuf {
+        match &label.package {
+            Some(package) => PathBuf::from(format!("{}/{}", package, label.target)),
+            None => PathBuf::from(&label.target),
+        }
+    }
+}
+
+/// Write rendered outputs (file contents keyed by relative path, as produced
+/// by [Renderer::render]) to disk under `out_dir`, or print them to stdout
+/// when `dry_run` is set.
+pub fn write_outputs(
+    outputs: BTreeMap<PathBuf, String>,
+    out_dir: &Path,
+    dry_run: bool,
+) -> Result<()> {
+    // Separator delimiting files in dry-run output.
+    const SEPARATOR: &str =
+        "===============================================================================";
+
+    // Re-key each output by its absolute location under `out_dir`.
+    let outputs: BTreeMap<PathBuf, String> = outputs
+        .into_iter()
+        .map(|(path, content)| (out_dir.join(path), content))
+        .collect();
+
+    if dry_run {
+        for (path, content) in outputs {
+            println!("{}", SEPARATOR);
+            println!("{}", path.display());
+            println!("{}", SEPARATOR);
+            println!("{}\n", content);
+        }
+    } else {
+        for (path, content) in outputs {
+            // Ensure the output directory exists
+            fs::create_dir_all(
+                path.parent()
+                    .expect("All file paths should have valid directories"),
+            )?;
+
+            // `with_context` defers formatting the message to the error path
+            // instead of allocating it on every successful write.
+            fs::write(&path, content.as_bytes())
+                .with_context(|| format!("Failed to write file to disk: {}", path.display()))?;
+        }
+    }
+
+    Ok(())
+}
+
+/// Render the Bazel label of a crate
+pub fn render_crate_bazel_label(
+    template: &str,
+    repository_name: &str,
+    name: &str,
+    version: &str,
+    target: &str,
+) -> String {
+    // Substitute each placeholder in turn; placeholders absent from the
+    // template are simply left untouched.
+    let substitutions = [
+        ("{repository}", repository_name),
+        ("{name}", name),
+        ("{version}", version),
+        ("{target}", target),
+    ];
+
+    let mut label = template.to_owned();
+    for (placeholder, value) in substitutions {
+        label = label.replace(placeholder, value);
+    }
+    label
+}
+
+/// Render the repository name for a crate by substituting the
+/// `{repository}`, `{name}`, and `{version}` placeholders in `template`.
+pub fn render_crate_bazel_repository(
+    template: &str,
+    repository_name: &str,
+    name: &str,
+    version: &str,
+) -> String {
+    template
+        .replace("{repository}", repository_name)
+        .replace("{name}", name)
+        .replace("{version}", version)
+}
+
+/// Render the file name of a crate's BUILD file by substituting the
+/// `{name}` and `{version}` placeholders in `template`.
+pub fn render_crate_build_file(template: &str, name: &str, version: &str) -> String {
+    template
+        .replace("{name}", name)
+        .replace("{version}", version)
+}
+
+/// Render the Bazel label of a vendor module label, substituting `{file}`
+/// in `template` with `name`.
+///
+/// # Errors
+///
+/// Returns an error if the rendered string is not a valid Bazel label.
+pub fn render_module_label(template: &str, name: &str) -> Result<Label> {
+    Label::from_str(&template.replace("{file}", name))
+}
+
+/// Render the Bazel label of a platform triple
+pub fn render_platform_constraint_label(template: &str, triple: &str) -> String {
+    // Equivalent to `template.replace("{triple}", triple)`: split on the
+    // placeholder and rejoin with the concrete triple.
+    template.split("{triple}").collect::<Vec<_>>().join(triple)
+}
+
+/// Render a crate's BUILD file name from `template` and parse it as a label.
+fn render_build_file_template(template: &str, name: &str, version: &str) -> Result<Label> {
+    let rendered = template
+        .replace("{name}", name)
+        .replace("{version}", version);
+    Label::from_str(&rendered)
+}
+
+#[cfg(test)]
+mod test {
+    use super::*;
+
+    use crate::config::{Config, CrateId, VendorMode};
+    use crate::context::crate_context::{CrateContext, Rule};
+    use crate::context::{BuildScriptAttributes, Context, TargetAttributes};
+    use crate::metadata::Annotations;
+    use crate::test;
+
+    fn mock_render_config() -> RenderConfig {
+        serde_json::from_value(serde_json::json!({
+            "repository_name": "test_rendering"
+        }))
+        .unwrap()
+    }
+
+    fn mock_target_attributes() -> TargetAttributes {
+        TargetAttributes {
+            crate_name: "mock_crate".to_owned(),
+            crate_root: Some("src/root.rs".to_owned()),
+            ..TargetAttributes::default()
+        }
+    }
+
+    /// A `CrateContext` for `mock_crate` 0.1.0 with the given targets. Tests
+    /// needing extra fields use struct-update syntax on top of this.
+    fn mock_crate_context(targets: Vec<Rule>) -> CrateContext {
+        CrateContext {
+            name: "mock_crate".to_owned(),
+            version: "0.1.0".to_owned(),
+            targets,
+            ..CrateContext::default()
+        }
+    }
+
+    /// Build a `Context` containing exactly one crate.
+    fn mock_context(crate_ctx: CrateContext) -> Context {
+        let mut context = Context::default();
+        let crate_id = CrateId::new(crate_ctx.name.clone(), crate_ctx.version.clone());
+        context.crates.insert(crate_id, crate_ctx);
+        context
+    }
+
+    #[test]
+    fn render_rust_library() {
+        let context = mock_context(mock_crate_context(vec![Rule::Library(
+            mock_target_attributes(),
+        )]));
+
+        let renderer = Renderer::new(mock_render_config());
+        let output = renderer.render(&context).unwrap();
+
+        let build_file_content = output
+            .get(&PathBuf::from("BUILD.mock_crate-0.1.0.bazel"))
+            .unwrap();
+
+        assert!(build_file_content.contains("rust_library("));
+        assert!(build_file_content.contains("name = \"mock_crate\""));
+    }
+
+    #[test]
+    fn render_cargo_build_script() {
+        let context = mock_context(CrateContext {
+            // Build script attributes are required.
+            build_script_attrs: Some(BuildScriptAttributes::default()),
+            ..mock_crate_context(vec![Rule::BuildScript(TargetAttributes {
+                crate_name: "build_script_build".to_owned(),
+                crate_root: Some("build.rs".to_owned()),
+                ..TargetAttributes::default()
+            })])
+        });
+
+        let renderer = Renderer::new(mock_render_config());
+        let output = renderer.render(&context).unwrap();
+
+        let build_file_content = output
+            .get(&PathBuf::from("BUILD.mock_crate-0.1.0.bazel"))
+            .unwrap();
+
+        assert!(build_file_content.contains("cargo_build_script("));
+        assert!(build_file_content.contains("name = \"build_script_build\""));
+
+        // Ensure `cargo_build_script` requirements are met
+        assert!(build_file_content.contains("name = \"mock_crate_build_script\""));
+    }
+
+    #[test]
+    fn render_proc_macro() {
+        let context = mock_context(mock_crate_context(vec![Rule::ProcMacro(
+            mock_target_attributes(),
+        )]));
+
+        let renderer = Renderer::new(mock_render_config());
+        let output = renderer.render(&context).unwrap();
+
+        let build_file_content = output
+            .get(&PathBuf::from("BUILD.mock_crate-0.1.0.bazel"))
+            .unwrap();
+
+        assert!(build_file_content.contains("rust_proc_macro("));
+        assert!(build_file_content.contains("name = \"mock_crate\""));
+    }
+
+    #[test]
+    fn render_binary() {
+        let context = mock_context(mock_crate_context(vec![Rule::Binary(
+            mock_target_attributes(),
+        )]));
+
+        let renderer = Renderer::new(mock_render_config());
+        let output = renderer.render(&context).unwrap();
+
+        let build_file_content = output
+            .get(&PathBuf::from("BUILD.mock_crate-0.1.0.bazel"))
+            .unwrap();
+
+        assert!(build_file_content.contains("rust_binary("));
+        assert!(build_file_content.contains("name = \"mock_crate__bin\""));
+    }
+
+    #[test]
+    fn render_additive_build_contents() {
+        let context = mock_context(CrateContext {
+            additive_build_file_content: Some(
+                "# Hello World from additive section!".to_owned(),
+            ),
+            ..mock_crate_context(vec![Rule::Binary(mock_target_attributes())])
+        });
+
+        let renderer = Renderer::new(mock_render_config());
+        let output = renderer.render(&context).unwrap();
+
+        let build_file_content = output
+            .get(&PathBuf::from("BUILD.mock_crate-0.1.0.bazel"))
+            .unwrap();
+
+        assert!(build_file_content.contains("# Hello World from additive section!"));
+    }
+
+    #[test]
+    fn render_aliases() {
+        let annotations = Annotations::new(
+            test::metadata::alias(),
+            test::lockfile::alias(),
+            Config::default(),
+        )
+        .unwrap();
+        let context = Context::new(annotations).unwrap();
+
+        let renderer = Renderer::new(mock_render_config());
+        let output = renderer.render(&context).unwrap();
+
+        let build_file_content = output.get(&PathBuf::from("BUILD.bazel")).unwrap();
+
+        assert!(build_file_content.contains(r#"name = "names-0.12.1-dev__names","#));
+        assert!(build_file_content.contains(r#"name = "names-0.13.0__names","#));
+    }
+
+    #[test]
+    fn render_crate_repositories() {
+        let context = mock_context(mock_crate_context(vec![Rule::Library(
+            mock_target_attributes(),
+        )]));
+
+        let renderer = Renderer::new(mock_render_config());
+        let output = renderer.render(&context).unwrap();
+
+        let defs_module = output.get(&PathBuf::from("defs.bzl")).unwrap();
+
+        assert!(defs_module.contains("def crate_repositories():"));
+    }
+
+    // NOTE(review): name likely intended to be `render_remote_vendor_mode`;
+    // kept as-is to avoid churn in test listings.
+    #[test]
+    fn remote_remote_vendor_mode() {
+        let context = mock_context(mock_crate_context(vec![Rule::Library(
+            mock_target_attributes(),
+        )]));
+
+        // Enable remote vendor mode
+        let config = RenderConfig {
+            vendor_mode: Some(VendorMode::Remote),
+            ..mock_render_config()
+        };
+
+        let renderer = Renderer::new(config);
+        let output = renderer.render(&context).unwrap();
+
+        let defs_module = output.get(&PathBuf::from("defs.bzl")).unwrap();
+        assert!(defs_module.contains("def crate_repositories():"));
+
+        let crates_module = output.get(&PathBuf::from("crates.bzl")).unwrap();
+        assert!(crates_module.contains("def crate_repositories():"));
+    }
+
+    #[test]
+    fn remote_local_vendor_mode() {
+        let context = mock_context(mock_crate_context(vec![Rule::Library(
+            mock_target_attributes(),
+        )]));
+
+        // Enable local vendor mode
+        let config = RenderConfig {
+            vendor_mode: Some(VendorMode::Local),
+            ..mock_render_config()
+        };
+
+        let renderer = Renderer::new(config);
+        let output = renderer.render(&context).unwrap();
+
+        // Local vendoring does not produce a `crate_repositories` macro
+        let defs_module = output.get(&PathBuf::from("defs.bzl")).unwrap();
+        assert!(!defs_module.contains("def crate_repositories():"));
+
+        // Local vendoring does not produce a `crates.bzl` file.
+        assert!(output.get(&PathBuf::from("crates.bzl")).is_none());
+    }
+}
diff --git a/crate_universe/src/rendering/template_engine.rs b/crate_universe/src/rendering/template_engine.rs
new file mode 100644
index 0000000..792802f
--- /dev/null
+++ b/crate_universe/src/rendering/template_engine.rs
@@ -0,0 +1,399 @@
+//! A template engine backed by [Tera] for rendering Files.
+
+use std::collections::HashMap;
+
+use anyhow::{Context as AnyhowContext, Result};
+use serde_json::{from_value, to_value, Value};
+use tera::{self, Tera};
+
+use crate::config::{CrateId, RenderConfig};
+use crate::context::Context;
+use crate::rendering::{
+ render_crate_bazel_label, render_crate_bazel_repository, render_crate_build_file,
+ render_module_label, render_platform_constraint_label,
+};
+use crate::utils::sanitize_module_name;
+use crate::utils::sanitize_repository_name;
+use crate::utils::starlark::{SelectStringDict, SelectStringList};
+
+/// A [Tera]-backed engine pre-loaded with every embedded template and a base
+/// rendering context seeded from the render config.
+pub struct TemplateEngine {
+    // Tera instance with all templates registered at construction.
+    engine: Tera,
+    // Base context cloned for each render call.
+    context: tera::Context,
+}
+
+impl TemplateEngine {
+    /// Construct a new engine with every embedded template registered and a
+    /// base `tera::Context` seeded from `render_config`.
+    pub fn new(render_config: &RenderConfig) -> Self {
+        // Pair a template's registration name with its contents, embedded at
+        // compile time. Every registration name equals the template's path
+        // below `src/rendering/templates/`, so the path is derived from the
+        // name instead of being spelled out nineteen times.
+        macro_rules! template {
+            ($name:literal) => {
+                (
+                    $name,
+                    include_str!(concat!(
+                        env!("CARGO_MANIFEST_DIR"),
+                        "/src/rendering/templates/",
+                        $name
+                    )),
+                )
+            };
+        }
+
+        let mut tera = Tera::default();
+        tera.add_raw_templates(vec![
+            template!("partials/crate/aliases.j2"),
+            template!("partials/crate/binary.j2"),
+            template!("partials/crate/build_script.j2"),
+            template!("partials/crate/common_attrs.j2"),
+            template!("partials/crate/deps.j2"),
+            template!("partials/crate/library.j2"),
+            template!("partials/crate/proc_macro.j2"),
+            template!("partials/module/aliases_map.j2"),
+            template!("partials/module/deps_map.j2"),
+            template!("partials/module/repo_git.j2"),
+            template!("partials/module/repo_http.j2"),
+            template!("partials/starlark/glob.j2"),
+            template!("partials/starlark/selectable_dict.j2"),
+            template!("partials/starlark/selectable_list.j2"),
+            template!("partials/header.j2"),
+            template!("crate_build_file.j2"),
+            template!("module_build_file.j2"),
+            template!("module_bzl.j2"),
+            template!("vendor_module.j2"),
+        ])
+        .unwrap();
+
+        // Expose label/path rendering helpers to the templates.
+        tera.register_function(
+            "crate_build_file",
+            crate_build_file_fn_generator(render_config.build_file_template.clone()),
+        );
+        tera.register_function(
+            "crate_label",
+            crate_label_fn_generator(
+                render_config.crate_label_template.clone(),
+                render_config.repository_name.clone(),
+            ),
+        );
+        tera.register_function(
+            "crate_repository",
+            crate_repository_fn_generator(
+                render_config.crate_repository_template.clone(),
+                render_config.repository_name.clone(),
+            ),
+        );
+        tera.register_function(
+            "platform_label",
+            platform_label_fn_generator(render_config.platforms_template.clone()),
+        );
+        tera.register_function("sanitize_module_name", sanitize_module_name_fn);
+        tera.register_function(
+            "crates_module_label",
+            module_label_fn_generator(render_config.crates_module_template.clone()),
+        );
+
+        // Values available to every template render.
+        let mut context = tera::Context::new();
+        context.insert("default_select_list", &SelectStringList::default());
+        context.insert("default_select_dict", &SelectStringDict::default());
+        context.insert("repository_name", &render_config.repository_name);
+        context.insert("vendor_mode", &render_config.vendor_mode);
+        context.insert("Null", &tera::Value::Null);
+        context.insert(
+            "default_package_name",
+            // Pre-rendered as Starlark: a quoted string or the literal `None`.
+            &match render_config.default_package_name.as_ref() {
+                Some(pkg_name) => format!("\"{}\"", pkg_name),
+                None => "None".to_owned(),
+            },
+        );
+
+        Self {
+            engine: tera,
+            context,
+        }
+    }
+
+    /// Clone the base context shared by all render calls.
+    fn new_tera_ctx(&self) -> tera::Context {
+        self.context.clone()
+    }
+
+    /// Render a BUILD file for every crate in `ctx`, keyed by crate id.
+    pub fn render_crate_build_files<'a>(
+        &self,
+        ctx: &'a Context,
+    ) -> Result<HashMap<&'a CrateId, String>> {
+        // Create the render context with the global planned context to be
+        // reused when rendering crates.
+        let mut context = self.new_tera_ctx();
+        context.insert("context", ctx);
+
+        ctx.crates
+            .iter()
+            .map(|(id, _)| {
+                let aliases = ctx.crate_aliases(id, false, false);
+                let build_aliases = ctx.crate_aliases(id, true, false);
+
+                // Each insert overwrites the per-crate keys from the
+                // previous iteration.
+                context.insert("crate_id", &id);
+                context.insert("common_aliases", &aliases);
+                context.insert("build_aliases", &build_aliases);
+
+                let content = self
+                    .engine
+                    .render("crate_build_file.j2", &context)
+                    .context("Failed to render BUILD file")?;
+
+                Ok((id, content))
+            })
+            .collect()
+    }
+
+    /// Render the BUILD file that accompanies the generated crates module.
+    pub fn render_module_build_file(&self, data: &Context) -> Result<String> {
+        let mut context = self.new_tera_ctx();
+        context.insert("context", data);
+
+        let workspace_member_deps = data.flat_workspace_member_deps();
+        context.insert("workspace_member_dependencies", &workspace_member_deps);
+
+        let binary_crates_map = data.flat_binary_deps();
+        context.insert("binary_crates_map", &binary_crates_map);
+
+        self.engine
+            .render("module_build_file.j2", &context)
+            .context("Failed to render crates module")
+    }
+
+    /// Render the `defs.bzl` crates module.
+    pub fn render_module_bzl(&self, data: &Context) -> Result<String> {
+        let mut context = self.new_tera_ctx();
+        context.insert("context", data);
+
+        self.engine
+            .render("module_bzl.j2", &context)
+            .context("Failed to render crates module")
+    }
+
+    /// Render the `crates.bzl` file used by remote vendor mode.
+    pub fn render_vendor_module_file(&self, data: &Context) -> Result<String> {
+        let mut context = self.new_tera_ctx();
+        context.insert("context", data);
+
+        self.engine
+            .render("vendor_module.j2", &context)
+            .context("Failed to render vendor module")
+    }
+}
+
+/// A convenience wrapper for parsing parameters to tera functions.
+///
+/// Expands to the parsed value, or early-returns a `tera::Error` when the
+/// parameter is missing or has the wrong type.
+macro_rules! parse_tera_param {
+    ($param:literal, $param_type:ty, $args:ident) => {
+        match $args.get($param) {
+            Some(val) => match from_value::<$param_type>(val.clone()) {
+                Ok(v) => v,
+                Err(_) => {
+                    // Name the actual expected type rather than always
+                    // claiming `String` (and fix the "paramater" typo).
+                    return Err(tera::Error::msg(format!(
+                        "The `{}` parameter could not be parsed as a `{}`.",
+                        $param,
+                        stringify!($param_type)
+                    )))
+                }
+            },
+            None => {
+                return Err(tera::Error::msg(format!(
+                    "No `{}` parameter was passed.",
+                    $param
+                )))
+            }
+        }
+    };
+}
+
+/// Convert a crate name into a module name by applying transforms to invalid characters.
+fn sanitize_module_name_fn(args: &HashMap<String, Value>) -> tera::Result<Value> {
+    let crate_name = parse_tera_param!("crate_name", String, args);
+
+    // Serialization of a plain string is practically infallible, but surface
+    // any error as a tera error rather than panicking.
+    to_value(sanitize_module_name(&crate_name))
+        .map_err(|_| tera::Error::msg("Failed to generate resulting module name"))
+}
+
+/// Build a tera function that renders a platform triple into a constraint
+/// label using the configured `{triple}` template.
+fn platform_label_fn_generator(template: String) -> impl tera::Function {
+    Box::new(
+        move |args: &HashMap<String, Value>| -> tera::Result<Value> {
+            let triple = parse_tera_param!("triple", String, args);
+            match to_value(render_platform_constraint_label(&template, &triple)) {
+                Ok(v) => Ok(v),
+                // NOTE(review): message copied from `sanitize_module_name_fn`
+                // and is misleading here — it is a platform label, not a
+                // module name.
+                Err(_) => Err(tera::Error::msg("Failed to generate resulting module name")),
+            }
+        },
+    )
+}
+
+/// Build a tera function that renders a crate's BUILD file name from the
+/// configured `{name}`/`{version}` template.
+fn crate_build_file_fn_generator(template: String) -> impl tera::Function {
+    Box::new(
+        move |args: &HashMap<String, Value>| -> tera::Result<Value> {
+            let name = parse_tera_param!("name", String, args);
+            let version = parse_tera_param!("version", String, args);
+
+            match to_value(render_crate_build_file(&template, &name, &version)) {
+                Ok(v) => Ok(v),
+                Err(_) => Err(tera::Error::msg("Failed to generate crate's BUILD file")),
+            }
+        },
+    )
+}
+
+/// Convert a file name to a Bazel label
+fn module_label_fn_generator(template: String) -> impl tera::Function {
+    Box::new(
+        move |args: &HashMap<String, Value>| -> tera::Result<Value> {
+            let file = parse_tera_param!("file", String, args);
+
+            // Surface label-rendering failures as tera errors.
+            let label = render_module_label(&template, &file).map_err(tera::Error::msg)?;
+
+            to_value(label.to_string())
+                .map_err(|_| tera::Error::msg("Failed to generate crate's BUILD file"))
+        },
+    )
+}
+
+/// Build a tera function that renders (and sanitizes) a crate target's Bazel
+/// label from the configured template.
+fn crate_label_fn_generator(template: String, repository_name: String) -> impl tera::Function {
+    Box::new(
+        move |args: &HashMap<String, Value>| -> tera::Result<Value> {
+            let name = parse_tera_param!("name", String, args);
+            let version = parse_tera_param!("version", String, args);
+            let target = parse_tera_param!("target", String, args);
+
+            match to_value(sanitize_repository_name(&render_crate_bazel_label(
+                &template,
+                &repository_name,
+                &name,
+                &version,
+                &target,
+            ))) {
+                Ok(v) => Ok(v),
+                Err(_) => Err(tera::Error::msg("Failed to generate crate's label")),
+            }
+        },
+    )
+}
+
+/// Build a tera function that renders (and sanitizes) a crate's repository
+/// name from the configured template.
+fn crate_repository_fn_generator(template: String, repository_name: String) -> impl tera::Function {
+    Box::new(
+        move |args: &HashMap<String, Value>| -> tera::Result<Value> {
+            let name = parse_tera_param!("name", String, args);
+            let version = parse_tera_param!("version", String, args);
+
+            match to_value(sanitize_repository_name(&render_crate_bazel_repository(
+                &template,
+                &repository_name,
+                &name,
+                &version,
+            ))) {
+                Ok(v) => Ok(v),
+                Err(_) => Err(tera::Error::msg("Failed to generate crate repository name")),
+            }
+        },
+    )
+}
diff --git a/crate_universe/src/rendering/templates/crate_build_file.j2 b/crate_universe/src/rendering/templates/crate_build_file.j2
new file mode 100644
index 0000000..ff9d4ad
--- /dev/null
+++ b/crate_universe/src/rendering/templates/crate_build_file.j2
@@ -0,0 +1,44 @@
+{%- set crate = context.crates | get(key=crate_id) %}{# Resolve the crate this BUILD file is rendered for #}
+{%- include "partials/header.j2" %}
+
+load(
+    "@bazel_skylib//lib:selects.bzl",
+    "selects",
+)
+load(
+    "@rules_rust//cargo:defs.bzl",
+    "cargo_build_script",
+)
+load(
+    "@rules_rust//rust:defs.bzl",
+    "rust_binary",
+    "rust_library",
+    "rust_proc_macro",
+)
+
+# buildifier: disable=bzl-visibility
+load("@rules_rust//crate_universe/private:selects.bzl", "select_with_or")
+
+package(default_visibility = ["//visibility:public"])
+
+# licenses([
+#     "TODO", # {{ crate.license }}
+# ])
+
+{% for rule in crate.targets -%}
+{%- for rule_type, target in rule %}{# Each rule serializes as a (type, attributes) pair #}
+{%- if rule_type in ["BuildScript"] %}
+{% include "partials/crate/build_script.j2" %}
+{%- elif rule_type in ["ProcMacro"] %}
+{% include "partials/crate/proc_macro.j2" %}
+{%- elif rule_type in ["Library"] %}
+{% include "partials/crate/library.j2" %}
+{%- elif rule_type in ["Binary"] %}
+{% include "partials/crate/binary.j2" %}
+{%- endif %}
+{%- endfor %}
+{%- endfor %}
+{%- if crate.additive_build_file_content %}{# Optional user-supplied extra BUILD content #}
+# Additive BUILD file content
+{{ crate.additive_build_file_content }}
+{%- endif %}
diff --git a/crate_universe/src/rendering/templates/module_build_file.j2 b/crate_universe/src/rendering/templates/module_build_file.j2
new file mode 100644
index 0000000..63124dc
--- /dev/null
+++ b/crate_universe/src/rendering/templates/module_build_file.j2
@@ -0,0 +1,49 @@
+{%- include "partials/header.j2" %}
+
+package(default_visibility = ["//visibility:public"])
+
+exports_files(
+    [
+        "cargo-bazel.json",
+        "defs.bzl",
+        {%- set current_vendor_mode = vendor_mode | default(value="") %}{%- if current_vendor_mode == "remote" %}"crates.bzl",{%- endif %}
+    ] + glob([
+        "*.bazel",
+    ]),
+)
+
+filegroup(
+    name = "srcs",
+    srcs = glob([
+        "*.bazel",
+        "*.bzl",
+    ]),
+)
+
+# Workspace Member Dependencies
+{%- for dep, rename in workspace_member_dependencies %}{# Alias each workspace member's library target #}
+{%- set crate = context.crates | get(key=dep) %}
+{%- if crate | get(key="library_target_name", default=Null) %}{# Only crates that expose a library target #}
+alias(
+    name = "{{ rename | default(value=crate.name) }}",
+    actual = "{{ crate_label(name = crate.name, version = crate.version, target = crate.library_target_name) }}",
+    tags = ["manual"],
+)
+{%- endif %}
+{%- endfor %}
+
+# Binaries
+{%- for id, rename in binary_crates_map %}{# Alias every binary target of each binary dependency #}
+{%- set crate = context.crates | get(key=id) %}
+{%- for rule in crate.targets %}
+{%- for rule_type, target in rule %}
+{%- if rule_type in ["Binary"] %}
+alias(
+    name = "{{ rename | default(value=crate.name) }}__{{ target.crate_name }}",
+    actual = "{{ crate_label(name = crate.name, version = crate.version, target = target.crate_name ~ '__bin') }}",
+    tags = ["manual"],
+)
+{%- endif %}
+{%- endfor %}
+{%- endfor %}
+{%- endfor %}
diff --git a/crate_universe/src/rendering/templates/module_bzl.j2 b/crate_universe/src/rendering/templates/module_bzl.j2
new file mode 100644
index 0000000..4ea1624
--- /dev/null
+++ b/crate_universe/src/rendering/templates/module_bzl.j2
@@ -0,0 +1,338 @@
+{#
+To keep line numbers consistent with the rendered version, empty space is
+intentionally placed here which should match the line length of `partials/header.j2`.
+
+Expected length = 6 lines
+#}{%- include "partials/header.j2" %}
+"""
+# `crates_repository` API
+
+- [aliases](#aliases)
+- [crate_deps](#crate_deps)
+- [all_crate_deps](#all_crate_deps)
+- [crate_repositories](#crate_repositories)
+
+"""
+
+load("@bazel_tools//tools/build_defs/repo:git.bzl", "new_git_repository")
+load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+load("@bazel_skylib//lib:selects.bzl", "selects")
+
+###############################################################################
+# MACROS API
+###############################################################################
+
+# An identifier that represent common dependencies (unconditional).
+_COMMON_CONDITION = ""
+
+def _flatten_dependency_maps(all_dependency_maps):
+ """Flatten a list of dependency maps into one dictionary.
+
+ Dependency maps have the following structure:
+
+ ```python
+ DEPENDENCIES_MAP = {
+ # The first key in the map is a Bazel package
+ # name of the workspace this file is defined in.
+ "workspace_member_package": {
+
+ # Not all dependencies are supported for all platforms.
+ # the condition key is the condition required to be true
+ # on the host platform.
+ "condition": {
+
+ # An alias to a crate target. # The label of the crate target the
+ # Aliases are only crate names. # package name refers to.
+ "package_name": "@full//:label",
+ }
+ }
+ }
+ ```
+
+ Args:
+ all_dependency_maps (list): A list of dicts as described above
+
+ Returns:
+ dict: A dictionary as described above
+ """
+ dependencies = {}
+
+ for workspace_deps_map in all_dependency_maps:
+ for pkg_name, conditional_deps_map in workspace_deps_map.items():
+ if pkg_name not in dependencies:
+ non_frozen_map = dict()
+ for key, values in conditional_deps_map.items():
+ non_frozen_map.update({key: dict(values.items())})
+ dependencies.setdefault(pkg_name, non_frozen_map)
+ continue
+
+ for condition, deps_map in conditional_deps_map.items():
+ # If the condition has not been recorded, do so and continue
+ if condition not in dependencies[pkg_name]:
+ dependencies[pkg_name].setdefault(condition, dict(deps_map.items()))
+ continue
+
+ # Alert on any mismatched dependencies
+ inconsistent_entries = []
+ for crate_name, crate_label in deps_map.items():
+ existing = dependencies[pkg_name][condition].get(crate_name)
+ if existing and existing != crate_label:
+ inconsistent_entries.append((crate_name, existing, crate_label))
+ dependencies[pkg_name][condition].update({crate_name: crate_label})
+
+ return dependencies
+
+def crate_deps(deps, package_name = {{ default_package_name }}):
+ """Finds the fully qualified label of the requested crates for the package where this macro is called.
+
+ Args:
+ deps (list): The desired list of crate targets.
+ package_name (str, optional): The package name of the set of dependencies to look up.
+ Defaults to `native.package_name()`.
+
+ Returns:
+ list: A list of labels to generated rust targets (str)
+ """
+
+ if not deps:
+ return []
+
+ if package_name == None:
+ package_name = native.package_name()
+
+ # Join both sets of dependencies
+ dependencies = _flatten_dependency_maps([
+ _NORMAL_DEPENDENCIES,
+ _NORMAL_DEV_DEPENDENCIES,
+ _PROC_MACRO_DEPENDENCIES,
+ _PROC_MACRO_DEV_DEPENDENCIES,
+ _BUILD_DEPENDENCIES,
+ _BUILD_PROC_MACRO_DEPENDENCIES,
+ ]).pop(package_name, {})
+
+ # Combine all conditional packages so we can easily index over a flat list
+ # TODO: Perhaps this should actually return select statements and maintain
+ # the conditionals of the dependencies
+ flat_deps = {}
+ for deps_set in dependencies.values():
+ for crate_name, crate_label in deps_set.items():
+ flat_deps.update({crate_name: crate_label})
+
+ missing_crates = []
+ crate_targets = []
+ for crate_target in deps:
+ if crate_target not in flat_deps:
+ missing_crates.append(crate_target)
+ else:
+ crate_targets.append(flat_deps[crate_target])
+
+ if missing_crates:
+ fail("Could not find crates `{}` among dependencies of `{}`. Available dependencies were `{}`".format(
+ missing_crates,
+ package_name,
+ dependencies,
+ ))
+
+ return crate_targets
+
+def all_crate_deps(
+ normal = False,
+ normal_dev = False,
+ proc_macro = False,
+ proc_macro_dev = False,
+ build = False,
+ build_proc_macro = False,
+ package_name = {{ default_package_name }}):
+ """Finds the fully qualified label of all requested direct crate dependencies \
+ for the package where this macro is called.
+
+ If no parameters are set, all normal dependencies are returned. Setting any one flag will
+ otherwise impact the contents of the returned list.
+
+ Args:
+ normal (bool, optional): If True, normal dependencies are included in the
+ output list.
+ normal_dev (bool, optional): If True, normal dev dependencies will be
+ included in the output list.
+ proc_macro (bool, optional): If True, proc_macro dependencies are included
+ in the output list.
+ proc_macro_dev (bool, optional): If True, dev proc_macro dependencies are
+ included in the output list.
+ build (bool, optional): If True, build dependencies are included
+ in the output list.
+ build_proc_macro (bool, optional): If True, build proc_macro dependencies are
+ included in the output list.
+ package_name (str, optional): The package name of the set of dependencies to look up.
+ Defaults to `native.package_name()` when unset.
+
+ Returns:
+ list: A list of labels to generated rust targets (str)
+ """
+
+ if package_name == None:
+ package_name = native.package_name()
+
+ # Determine the relevant maps to use
+ all_dependency_maps = []
+ if normal:
+ all_dependency_maps.append(_NORMAL_DEPENDENCIES)
+ if normal_dev:
+ all_dependency_maps.append(_NORMAL_DEV_DEPENDENCIES)
+ if proc_macro:
+ all_dependency_maps.append(_PROC_MACRO_DEPENDENCIES)
+ if proc_macro_dev:
+ all_dependency_maps.append(_PROC_MACRO_DEV_DEPENDENCIES)
+ if build:
+ all_dependency_maps.append(_BUILD_DEPENDENCIES)
+ if build_proc_macro:
+ all_dependency_maps.append(_BUILD_PROC_MACRO_DEPENDENCIES)
+
+ # Default to always using normal dependencies
+ if not all_dependency_maps:
+ all_dependency_maps.append(_NORMAL_DEPENDENCIES)
+
+ dependencies = _flatten_dependency_maps(all_dependency_maps).pop(package_name, None)
+
+ if not dependencies:
+ return []
+
+ crate_deps = list(dependencies.pop(_COMMON_CONDITION, {}).values())
+ for condition, deps in dependencies.items():
+ crate_deps += selects.with_or({_CONDITIONS[condition]: deps.values()})
+
+ return crate_deps
+
+def aliases(
+ normal = False,
+ normal_dev = False,
+ proc_macro = False,
+ proc_macro_dev = False,
+ build = False,
+ build_proc_macro = False,
+ package_name = {{ default_package_name }}):
+ """Produces a map of Crate alias names to their original label
+
+ If no dependency kinds are specified, `normal` and `proc_macro` are used by default.
+ Setting any one flag will otherwise determine the contents of the returned dict.
+
+ Args:
+ normal (bool, optional): If True, normal dependencies are included in the
+ output list.
+ normal_dev (bool, optional): If True, normal dev dependencies will be
+ included in the output list.
+ proc_macro (bool, optional): If True, proc_macro dependencies are included
+ in the output list.
+ proc_macro_dev (bool, optional): If True, dev proc_macro dependencies are
+ included in the output list.
+ build (bool, optional): If True, build dependencies are included
+ in the output list.
+ build_proc_macro (bool, optional): If True, build proc_macro dependencies are
+ included in the output list.
+ package_name (str, optional): The package name of the set of dependencies to look up.
+ Defaults to `native.package_name()` when unset.
+
+ Returns:
+ dict: The aliases of all associated packages
+ """
+ if package_name == None:
+ package_name = native.package_name()
+
+ # Determine the relevant maps to use
+ all_aliases_maps = []
+ if normal:
+ all_aliases_maps.append(_NORMAL_ALIASES)
+ if normal_dev:
+ all_aliases_maps.append(_NORMAL_DEV_ALIASES)
+ if proc_macro:
+ all_aliases_maps.append(_PROC_MACRO_ALIASES)
+ if proc_macro_dev:
+ all_aliases_maps.append(_PROC_MACRO_DEV_ALIASES)
+ if build:
+ all_aliases_maps.append(_BUILD_ALIASES)
+ if build_proc_macro:
+ all_aliases_maps.append(_BUILD_PROC_MACRO_ALIASES)
+
+ # Default to always using normal aliases
+ if not all_aliases_maps:
+ all_aliases_maps.append(_NORMAL_ALIASES)
+ all_aliases_maps.append(_PROC_MACRO_ALIASES)
+
+ aliases = _flatten_dependency_maps(all_aliases_maps).pop(package_name, None)
+
+ if not aliases:
+ return dict()
+
+ common_items = aliases.pop(_COMMON_CONDITION, {}).items()
+
+ # If there are only common items in the dictionary, immediately return them
+ if not aliases:
+ return dict(common_items)
+
+ # Build a single select statement where each conditional has accounted for the
+ # common set of aliases.
+ crate_aliases = {"//conditions:default": common_items}
+ for condition, deps in aliases.items():
+ condition_triples = _CONDITIONS[condition]
+ if condition_triples in crate_aliases:
+ crate_aliases[condition_triples].update(deps)
+ else:
+ crate_aliases.update({_CONDITIONS[condition]: dict(deps.items() + common_items)})
+
+ return selects.with_or(crate_aliases)
+
+###############################################################################
+# WORKSPACE MEMBER DEPS AND ALIASES
+###############################################################################
+
+_NORMAL_DEPENDENCIES = {% set deps_type = "normal" %}{% include "partials/module/deps_map.j2" %}
+
+_NORMAL_ALIASES = {% set deps_type = "normal" %}{% include "partials/module/aliases_map.j2" %}
+
+_NORMAL_DEV_DEPENDENCIES = {% set deps_type = "normal-dev" %}{% include "partials/module/deps_map.j2" %}
+
+_NORMAL_DEV_ALIASES = {% set deps_type = "normal-dev" %}{% include "partials/module/aliases_map.j2" %}
+
+_PROC_MACRO_DEPENDENCIES = {% set deps_type = "proc-macro" %}{% include "partials/module/deps_map.j2" %}
+
+_PROC_MACRO_ALIASES = {% set deps_type = "proc-macro" %}{% include "partials/module/aliases_map.j2" %}
+
+_PROC_MACRO_DEV_DEPENDENCIES = {% set deps_type = "proc-macro-dev" %}{% include "partials/module/deps_map.j2" %}
+
+_PROC_MACRO_DEV_ALIASES = {% set deps_type = "proc-macro-dev" %}{% include "partials/module/aliases_map.j2" %}
+
+_BUILD_DEPENDENCIES = {% set deps_type = "build" %}{% include "partials/module/deps_map.j2" %}
+
+_BUILD_ALIASES = {% set deps_type = "build" %}{% include "partials/module/aliases_map.j2" %}
+
+_BUILD_PROC_MACRO_DEPENDENCIES = {% set deps_type = "build-proc-macro" %}{% include "partials/module/deps_map.j2" %}
+
+_BUILD_PROC_MACRO_ALIASES = {% set deps_type = "build-proc-macro" %}{% include "partials/module/aliases_map.j2" %}
+
+_CONDITIONS = {
+{%- for condition, triples in context.conditions %}
+ "{{ condition | addslashes }}": {{ triples | sort | json_encode | safe }},
+{%- endfor %}
+}
+{% set current_vendor_mode = vendor_mode | default(value="remote") %}{% if current_vendor_mode == "remote" %}
+###############################################################################
+
+def crate_repositories():
+ """A macro for defining repositories for all generated crates"""
+{%- if context.crates | length %}
+{%- for id, crate in context.crates %}
+{%- if not crate.repository %}{% continue %}{% endif %}
+{%- for repository_type, attrs in crate.repository %}
+{%- if repository_type in ["Http"] %}
+{% include "partials/module/repo_http.j2" %}
+{%- elif repository_type in ["Git"] %}
+{% include "partials/module/repo_git.j2" %}
+{%- else %}
+ {{ throw(message = "Unsupported repository type: " ~ repository_type) }}
+{%- endif %}
+{%- endfor %}
+{%- endfor %}
+{%- else %}
+ pass
+{%- endif %}
+{%- endif %}
diff --git a/crate_universe/src/rendering/templates/partials/crate/aliases.j2 b/crate_universe/src/rendering/templates/partials/crate/aliases.j2
new file mode 100644
index 0000000..5c4b67e
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/crate/aliases.j2
@@ -0,0 +1,32 @@
+selects.with_or({
+ {%- for cfg, values in selectable.selects %}
+ {%- if cfg in context.conditions and context.conditions[cfg] | length %}
+ # {{ cfg }}
+ (
+ {%- for triple in context.conditions[cfg] %}
+ "{{ platform_label(triple = triple) }}",
+ {%- endfor %}
+ ): {
+ {%- for dep in values %}
+ {%- set dep_crate = context.crates | get(key=dep.id) %}
+ "{{ crate_label(name = dep_crate.name, version = dep_crate.version, target = dep.target) }}": "{{ dep.alias }}",
+ {%- endfor %}
+ {%- for dep in selectable.common %}
+ {%- set dep_crate = context.crates | get(key=dep.id) %}
+ "{{ crate_label(name = dep_crate.name, version = dep_crate.version, target = dep.target) }}": "{{ dep.alias }}",
+ {%- endfor %}
+ },
+ {%- else %}
+ # {
+ # No supported platform triples for cfg: '{{ cfg }}'
+ # Skipped dependencies: {{ values | json_encode | safe }}
+ # }
+ {%- endif %}
+ {%- endfor %}
+ "//conditions:default": {
+ {%- for dep in selectable.common %}
+ {%- set dep_crate = context.crates | get(key=dep.id) %}
+ "{{ crate_label(name = dep_crate.name, version = dep_crate.version, target = dep.target) }}": "{{ dep.alias }}",
+ {%- endfor %}
+ },
+ })
\ No newline at end of file
diff --git a/crate_universe/src/rendering/templates/partials/crate/binary.j2 b/crate_universe/src/rendering/templates/partials/crate/binary.j2
new file mode 100644
index 0000000..2cdc5d9
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/crate/binary.j2
@@ -0,0 +1,18 @@
+rust_binary(
+ name = "{{ target.crate_name }}__bin",
+ deps = [
+ {%- if crate.library_target_name %}
+ ":{{ crate.library_target_name }}",
+ {%- endif %}
+ {%- for dep in crate.common_attrs | get(key="extra_deps", default=[]) %}
+ "{{ dep }}",
+ {%- endfor %}
+ ] + {% set deps = crate.common_attrs | get(key="deps", default=Null) %}{% include "partials/crate/deps.j2" %},
+ proc_macro_deps = [
+ {%- for dep in crate.common_attrs | get(key="extra_proc_macro_deps", default=[]) %}
+ "{{ dep }}",
+ {%- endfor %}
+ ] + {% set deps = crate.common_attrs | get(key="proc_macro_deps", default=Null) %}{% include "partials/crate/deps.j2" %},
+ aliases = {% set selectable = common_aliases %}{% include "partials/crate/aliases.j2" -%},
+{% include "partials/crate/common_attrs.j2" %}
+)
diff --git a/crate_universe/src/rendering/templates/partials/crate/build_script.j2 b/crate_universe/src/rendering/templates/partials/crate/build_script.j2
new file mode 100644
index 0000000..45b97f7
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/crate/build_script.j2
@@ -0,0 +1,70 @@
+cargo_build_script(
+ # See comment associated with alias. Do not change this name
+ name = "{{ crate.name }}_build_script",
+ aliases = {% set selectable = build_aliases %}{% include "partials/crate/aliases.j2" -%},
+ build_script_env = {% set selectable = crate.build_script_attrs | get(key="build_script_env", default=Null) %}{% include "partials/starlark/selectable_dict.j2" -%},
+ compile_data = {% if crate.build_script_attrs | get(key="compile_data_glob") %}glob({{ crate.build_script_attrs.compile_data_glob | json_encode | safe }}) + {% endif %}{% set selectable = crate.build_script_attrs | get(key="compile_data", default=Null) %}{% include "partials/starlark/selectable_list.j2" %},
+ crate_name = "{{ sanitize_module_name(crate_name=target.crate_name) }}",
+ crate_root = "{{ target.crate_root }}",
+ crate_features = [
+ {%- if crate.common_attrs | get(key="crate_features", default=Null) %}
+ {%- for feature in crate.common_attrs.crate_features %}
+ "{{ feature }}",
+ {%- endfor %}
+ {%- endif %}
+ ],
+ data = {% if crate.build_script_attrs | get(key="data_glob") %}glob({{ crate.build_script_attrs.data_glob | json_encode | safe }}) + {% endif %}{% set selectable = crate.build_script_attrs | get(key="data", default=Null) %}{% include "partials/starlark/selectable_list.j2" %},
+ deps = [
+ {%- for dep in crate.build_script_attrs | get(key="extra_deps", default=[]) %}
+ "{{ dep }}",
+ {%- endfor %}
+ ] + {% set deps = crate.build_script_attrs | get(key="deps", default=Null) %}{% include "partials/crate/deps.j2" %},
+ edition = "{{ crate.common_attrs.edition }}",
+ {%- if crate.common_attrs.linker_script %}
+ linker_script = "{{ crate.common_attrs.linker_script }}",
+ {%- endif %}
+ {%- if crate.build_script_attrs | get(key="links", default=Null) %}
+ links = "{{ crate.build_script_attrs.links }}",
+ {%- endif %}
+ proc_macro_deps = [
+ {%- for dep in crate.build_script_attrs | get(key="extra_proc_macro_deps", default=[]) %}
+ "{{ dep }}",
+ {%- endfor %}
+ ] + {% set deps = crate.build_script_attrs | get(key="proc_macro_deps", default=Null) %}{% include "partials/crate/deps.j2" %},
+ rustc_env = {% set selectable = crate.build_script_attrs | get(key="rustc_env", default=Null) %}{% include "partials/starlark/selectable_dict.j2" -%},
+ rustc_env_files = {% set selectable = crate.build_script_attrs | get(key="rustc_env_files", default=Null) %}{% include "partials/starlark/selectable_list.j2" %},
+ rustc_flags = [
+ # In most cases, warnings in 3rd party crates are not interesting as
+ # they're out of the control of consumers. The flag here silences
+ # warnings. For more details see:
+ # https://doc.rust-lang.org/rustc/lints/levels.html
+ "--cap-lints=allow",
+ ] + {% set selectable = crate.build_script_attrs | get(key="rustc_flags", default=Null) %}{% include "partials/starlark/selectable_list.j2" %},
+ srcs = {% set glob = target.srcs %}{% include "partials/starlark/glob.j2" -%},
+ tools = {% set selectable = crate.build_script_attrs | get(key="tools", default=Null) %}{% include "partials/starlark/selectable_list.j2" %},
+ version = "{{ crate.common_attrs.version }}",
+ tags = [
+ {%- if crate.common_attrs | get(key="tags", default=Null) %}
+ {%- for tag in crate.common_attrs.tags %}
+ "{{ tag }}",
+ {%- endfor %}
+ {%- endif %}
+ "cargo-bazel",
+ "manual",
+ "noclippy",
+ "norustfmt",
+ ],
+ visibility = ["//visibility:private"],
+)
+alias(
+ # Because `cargo_build_script` does some invisible target name mutating to
+ # determine the package and crate name for a build script, the Bazel
+ # target name of any build script cannot be the Cargo canonical name
+ # of `build_script_build` without losing out on having certain Cargo
+ # environment variables set.
+ name = "{{ target.crate_name }}",
+ actual = "{{ crate.name }}_build_script",
+ tags = [
+ "manual",
+ ],
+)
diff --git a/crate_universe/src/rendering/templates/partials/crate/common_attrs.j2 b/crate_universe/src/rendering/templates/partials/crate/common_attrs.j2
new file mode 100644
index 0000000..a381f44
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/crate/common_attrs.j2
@@ -0,0 +1,34 @@
+ compile_data = {% if crate.common_attrs | get(key="compile_data_glob") %}glob({{ crate.common_attrs.compile_data_glob | json_encode | safe }}) + {% endif %}{% set selectable = crate.common_attrs | get(key="compile_data", default=default_select_list) %}{% include "partials/starlark/selectable_list.j2" -%},
+ crate_root = "{{ target.crate_root }}",
+ crate_features = [
+ {%- for feature in crate.common_attrs | get(key="crate_features", default=[]) %}
+ "{{ feature }}",
+ {%- endfor %}
+ ],
+ data = {% if crate.common_attrs | get(key="data_glob") %}glob({{ crate.common_attrs.data_glob | json_encode | safe }}) + {% endif %}{% set selectable = crate.common_attrs | get(key="data", default=default_select_list) %}{% include "partials/starlark/selectable_list.j2" -%},
+ edition = "{{ crate.common_attrs.edition }}",
+ {%- if crate.common_attrs | get(key="linker_script", default=Null) %}
+ linker_script = "{{ crate.common_attrs.linker_script }}",
+ {%- endif %}
+ rustc_env = {% set selectable = crate.common_attrs | get(key="rustc_env", default=Null) %}{% include "partials/starlark/selectable_dict.j2" -%},
+ rustc_env_files = {% set selectable = crate.common_attrs | get(key="rustc_env_files", default=Null) %}{% include "partials/starlark/selectable_list.j2" -%},
+ rustc_flags = [
+ # In most cases, warnings in 3rd party crates are not interesting as
+ # they're out of the control of consumers. The flag here silences
+ # warnings. For more details see:
+ # https://doc.rust-lang.org/rustc/lints/levels.html
+ "--cap-lints=allow",
+ ] + {% set selectable = crate.common_attrs | get(key="rustc_flags", default=Null) %}{% include "partials/starlark/selectable_list.j2" -%},
+ srcs = {% set glob = target.srcs %}{% include "partials/starlark/glob.j2" -%},
+ version = "{{ crate.common_attrs.version }}",
+ tags = [
+ {%- if crate.common_attrs | get(key="tags", default=Null) %}
+ {%- for tag in crate.common_attrs.tags %}
+ "{{ tag }}",
+ {%- endfor %}
+ {%- endif %}
+ "cargo-bazel",
+ "manual",
+ "noclippy",
+ "norustfmt",
+ ],
diff --git a/crate_universe/src/rendering/templates/partials/crate/deps.j2 b/crate_universe/src/rendering/templates/partials/crate/deps.j2
new file mode 100644
index 0000000..0e0bf71
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/crate/deps.j2
@@ -0,0 +1,36 @@
+select_with_or({
+ {%- set selectable = deps | default(value=default_select_list) %}
+ {%- for cfg, values in selectable.selects %}
+ # {{ cfg }}
+ {%- if cfg in context.conditions and context.conditions[cfg] | length %}
+ (
+ {%- for triple in context.conditions[cfg] %}
+ "{{ platform_label(triple = triple) }}",
+ {%- endfor %}
+ ): [
+ # Target Deps
+ {%- for dep in values %}
+ {%- set dep_crate = context.crates | get(key=dep.id) %}
+ "{{ crate_label(name = dep_crate.name, version = dep_crate.version, target = dep.target) }}",
+ {%- endfor %}
+
+ # Common Deps
+ {%- for common_dep in selectable.common %}
+ {%- set common_dep_crate = context.crates | get(key=common_dep.id) %}
+ "{{ crate_label(name = common_dep_crate.name, version = common_dep_crate.version, target = common_dep.target) }}",
+ {%- endfor %}
+ ],
+ {%- else %}
+ #
+ # No supported platform triples for cfg: '{{ cfg }}'
+ # Skipped dependencies: {{ values | json_encode | safe }}
+ #
+ {%- endif %}
+ {%- endfor %}
+ "//conditions:default": [
+ {%- for common_dep in selectable.common %}
+ {%- set common_dep_crate = context.crates | get(key=common_dep.id) %}
+ "{{ crate_label(name = common_dep_crate.name, version = common_dep_crate.version, target = common_dep.target) }}",
+ {%- endfor %}
+ ],
+ })
\ No newline at end of file
diff --git a/crate_universe/src/rendering/templates/partials/crate/library.j2 b/crate_universe/src/rendering/templates/partials/crate/library.j2
new file mode 100644
index 0000000..f678bd9
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/crate/library.j2
@@ -0,0 +1,15 @@
+rust_library(
+ name = "{{ target.crate_name }}",
+ deps = [
+ {%- for dep in crate.common_attrs | get(key="extra_deps", default=[]) %}
+ "{{ dep }}",
+ {%- endfor %}
+ ] + {% set deps = crate.common_attrs | get(key="deps", default=Null) %}{% include "partials/crate/deps.j2" %},
+ proc_macro_deps = [
+ {%- for dep in crate.common_attrs | get(key="extra_proc_macro_deps", default=[]) %}
+ "{{ dep }}",
+ {%- endfor %}
+ ] + {% set deps = crate.common_attrs | get(key="proc_macro_deps", default=Null) %}{% include "partials/crate/deps.j2" %},
+ aliases = {% set selectable = common_aliases %}{% include "partials/crate/aliases.j2" -%},
+{% include "partials/crate/common_attrs.j2" %}
+)
diff --git a/crate_universe/src/rendering/templates/partials/crate/proc_macro.j2 b/crate_universe/src/rendering/templates/partials/crate/proc_macro.j2
new file mode 100644
index 0000000..c0b9d1d
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/crate/proc_macro.j2
@@ -0,0 +1,15 @@
+rust_proc_macro(
+ name = "{{ target.crate_name }}",
+ deps = [
+ {%- for dep in crate.common_attrs | get(key="extra_deps", default=[]) %}
+ "{{ dep }}",
+ {%- endfor %}
+ ] + {% set deps = crate.common_attrs | get(key="deps", default=Null) %}{% include "partials/crate/deps.j2" %},
+ proc_macro_deps = [
+ {%- for dep in crate.common_attrs | get(key="extra_proc_macro_deps", default=[]) %}
+ "{{ dep }}",
+ {%- endfor %}
+ ] + {% set deps = crate.common_attrs | get(key="proc_macro_deps", default=Null) %}{% include "partials/crate/deps.j2" %},
+ aliases = {% set selectable = common_aliases %}{% include "partials/crate/aliases.j2" -%},
+{% include "partials/crate/common_attrs.j2" %}
+)
diff --git a/crate_universe/src/rendering/templates/partials/header.j2 b/crate_universe/src/rendering/templates/partials/header.j2
new file mode 100644
index 0000000..6f88e85
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/header.j2
@@ -0,0 +1,6 @@
+###############################################################################
+# @generated
+# This file is auto-generated by the cargo-bazel tool.
+#
+# DO NOT MODIFY: Local changes may be replaced in future executions.
+###############################################################################
\ No newline at end of file
diff --git a/crate_universe/src/rendering/templates/partials/module/aliases_map.j2 b/crate_universe/src/rendering/templates/partials/module/aliases_map.j2
new file mode 100644
index 0000000..73d736e
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/module/aliases_map.j2
@@ -0,0 +1,53 @@
+{
+ {%- for id, path in context.workspace_members %}
+ {%- set workspace_member = context.crates | get(key=id) %}
+ "{{ path }}": {
+ {%- if deps_type in ["normal"] %}
+ {%- set_global deps_set = workspace_member.common_attrs | get(key="deps", default=default_select_list) %}
+ {%- elif deps_type in ["normal-dev"] %}
+ {%- set_global deps_set = workspace_member.common_attrs | get(key="deps_dev", default=default_select_list) %}
+ {%- elif deps_type in ["proc-macro"] %}
+ {%- set_global deps_set = workspace_member.common_attrs | get(key="proc_macro_deps", default=default_select_list) %}
+ {%- elif deps_type in ["proc-macro-dev"] %}
+ {%- set_global deps_set = workspace_member.common_attrs | get(key="proc_macro_deps_dev", default=default_select_list) %}
+ {%- elif deps_type in ["build"] %}
+ {%- if workspace_member | get(key="build_script_attrs", default=Null) %}
+ {%- set_global deps_set = workspace_member.build_script_attrs | get(key="deps", default=default_select_list) %}
+ {%- else %}
+ {%- set_global deps_set = default_select_list %}
+ {%- endif %}
+ {%- elif deps_type in ["build-proc-macro"] %}
+ {%- if workspace_member | get(key="build_script_attrs", default=Null) %}
+ {%- set_global deps_set = workspace_member.build_script_attrs | get(key="proc_macro_deps", default=default_select_list) %}
+ {%- else %}
+ {%- set_global deps_set = default_select_list %}
+ {%- endif %}
+ {%- else %}
+ {%- endif %}
+ {%- if deps_set.common | length %}
+ _COMMON_CONDITION: {
+ {%- for dep in deps_set.common %}
+ {%- if dep.id in context.workspace_members %}{% continue %}{% endif %}{# Workspace member repositories are not defined, skip adding their labels here #}
+ {%- set crate = context.crates | get(key=dep.id) %}
+ {%- if dep | get(key="alias", default=Null) %}
+ "{{ crate_label(name = crate.name, version = crate.version, target = crate.name) }}": "{{ dep.alias }}",
+ {%- endif %}
+ {%- endfor %}
+ },
+ {%- endif %}
+ {%- if deps_set.selects | length %}
+ {%- for condition, deps in deps_set.selects %}
+ "{{ condition | addslashes }}": {
+ {%- for dep in deps %}
+ {%- if dep.id in context.workspace_members %}{% continue %}{% endif %}{# Workspace member repositories are not defined, skip adding their labels here #}
+ {%- if dep | get(key="alias", default=Null) %}
+ {%- set crate = context.crates | get(key=dep.id) %}
+ "{{ crate_label(name = crate.name, version = crate.version, target = crate.name) }}": "{{ dep.alias }}",
+ {%- endif %}
+ {%- endfor %}
+ },
+ {%- endfor %}
+ {%- endif %}
+ },
+ {%- endfor %}
+}
diff --git a/crate_universe/src/rendering/templates/partials/module/deps_map.j2 b/crate_universe/src/rendering/templates/partials/module/deps_map.j2
new file mode 100644
index 0000000..c9f169d
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/module/deps_map.j2
@@ -0,0 +1,50 @@
+{
+ {%- for id, path in context.workspace_members %}
+ {%- set workspace_member = context.crates | get(key=id) %}
+ "{{ path }}": {
+ {%- if deps_type in ["normal"] %}
+ {%- set_global deps_set = workspace_member.common_attrs | get(key="deps", default=default_select_list) %}
+ {%- elif deps_type in ["normal-dev"] %}
+ {%- set_global deps_set = workspace_member.common_attrs | get(key="deps_dev", default=default_select_list) %}
+ {%- elif deps_type in ["proc-macro"] %}
+ {%- set_global deps_set = workspace_member.common_attrs | get(key="proc_macro_deps", default=default_select_list) %}
+ {%- elif deps_type in ["proc-macro-dev"] %}
+ {%- set_global deps_set = workspace_member.common_attrs | get(key="proc_macro_deps_dev", default=default_select_list) %}
+ {%- elif deps_type in ["build"] %}
+ {%- if workspace_member | get(key="build_script_attrs", default=Null) %}
+ {%- set_global deps_set = workspace_member.build_script_attrs | get(key="deps", default=default_select_list) %}
+ {%- else %}
+ {%- set_global deps_set = default_select_list %}
+ {%- endif %}
+ {%- elif deps_type in ["build-proc-macro"] %}
+ {%- if workspace_member | get(key="build_script_attrs", default=Null) %}
+ {%- set_global deps_set = workspace_member.build_script_attrs | get(key="proc_macro_deps", default=default_select_list) %}
+ {%- else %}
+ {%- set_global deps_set = default_select_list %}
+ {%- endif %}
+ {%- else %}
+ {{ throw(message= "Unexpected dependency type '" ~ deps_type ~ "' for '" ~ id ~ "'") }}
+ {%- endif %}
+ {%- if deps_set.common | length %}
+ _COMMON_CONDITION: {
+ {%- for dep in deps_set.common %}
+ {%- if dep.id in context.workspace_members %}{% continue %}{% endif %}{# Workspace member repositories are not defined, skip adding their labels here #}
+ {%- set crate = context.crates | get(key=dep.id) %}
+ "{{ dep | get(key="alias", default=crate.name) }}": "{{ crate_label(name = crate.name, version = crate.version, target = dep.target) }}",
+ {%- endfor %}
+ },
+ {%- endif %}
+ {%- if deps_set.selects | length %}
+ {%- for condition, deps in deps_set.selects %}
+ "{{ condition | addslashes }}": {
+ {%- for dep in deps %}
+ {%- if dep.id in context.workspace_members %}{% continue %}{% endif %}{# Workspace member repositories are not defined, skip adding their labels here #}
+ {%- set crate = context.crates | get(key=dep.id) %}
+ "{{ dep | get(key="alias", default=crate.name) }}": "{{ crate_label(name = crate.name, version = crate.version, target = dep.target) }}",
+ {%- endfor %}
+ },
+ {%- endfor %}
+ {%- endif %}
+ },
+ {%- endfor %}
+}
diff --git a/crate_universe/src/rendering/templates/partials/module/repo_git.j2 b/crate_universe/src/rendering/templates/partials/module/repo_git.j2
new file mode 100644
index 0000000..3bc2392
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/module/repo_git.j2
@@ -0,0 +1,41 @@
+ maybe(
+ new_git_repository,
+ name = "{{ crate_repository(name = crate.name, version = crate.version) }}",
+ {%- for type, commitish in attrs.commitish %}
+ {%- if type in ["Rev"] %}
+ commit = "{{ commitish }}",
+ {%- elif type in ["Tag"] %}
+ tag = "{{ commitish }}",
+ {%- elif type in ["Branch"] %}
+ branch = "{{ commitish }}",
+ {%- else %}
+ {{ throw(message= "Unexpected git commitish '" ~ type ~ "' for '" ~ crate.name ~ "'") }}
+ {%- endif %}
+ {%- endfor %}
+ init_submodules = True,
+ {%- if attrs | get(key="patch_args", default=Null) %}
+ patch_args = [
+ {%- for arg in attrs.patch_args %}
+ "{{ arg }}",
+ {%- endfor %}
+ ],
+ {%- endif %}
+ {%- if attrs | get(key="patch_tool", default=Null) %}
+ patch_tool = "{{ attrs.patch_tool }}",
+ {%- endif %}
+ {%- if attrs | get(key="patches", default=Null) %}
+ patches = [
+ {%- for patch in attrs.patches %}
+ "{{ patch }}",
+ {%- endfor %}
+ ],
+ {%- endif %}
+ {%- if attrs | get(key="shallow_since", default=Null) %}
+ shallow_since = "{{ attrs.shallow_since }}",
+ {%- endif %}
+ remote = "{{ attrs.remote }}",
+ build_file = Label("{{ crate_build_file(name = crate.name, version = crate.version)}}"),
+ {%- if attrs.strip_prefix %}
+ strip_prefix = "{{ attrs.strip_prefix }}",
+ {%- endif %}
+ )
diff --git a/crate_universe/src/rendering/templates/partials/module/repo_http.j2 b/crate_universe/src/rendering/templates/partials/module/repo_http.j2
new file mode 100644
index 0000000..8e3f7dc
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/module/repo_http.j2
@@ -0,0 +1,28 @@
+ maybe(
+ http_archive,
+ name = "{{ crate_repository(name = crate.name, version = crate.version) }}",
+ {%- if attrs | get(key="patch_args", default=Null) %}
+ patch_args = [
+ {%- for arg in attrs.patch_args %}
+ "{{ arg }}",
+ {%- endfor %}
+ ],
+ {%- endif %}
+ {%- if attrs | get(key="patch_tool", default=Null) %}
+ patch_tool = "{{ attrs.patch_tool }}",
+ {%- endif %}
+ {%- if attrs | get(key="patches", default=Null) %}
+ patches = [
+ {%- for patch in attrs.patches %}
+ "{{ patch }}",
+ {%- endfor %}
+ ],
+ {%- endif %}
+ {%- if attrs | get(key="sha256", default=Null) %}
+ sha256 = "{{ attrs.sha256 }}",
+ {%- endif %}
+ type = "tar.gz",
+ urls = ["{{ attrs.url }}"],
+ strip_prefix = "{{ crate.name }}-{{ crate.version }}",
+ build_file = Label("{{ crate_build_file(name = crate.name, version = crate.version)}}"),
+ )
diff --git a/crate_universe/src/rendering/templates/partials/starlark/glob.j2 b/crate_universe/src/rendering/templates/partials/starlark/glob.j2
new file mode 100644
index 0000000..67f70af
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/starlark/glob.j2
@@ -0,0 +1,12 @@
+glob(
+ include = [
+ {%- for pattern in glob.include %}
+ "{{ pattern }}",
+ {%- endfor %}
+ ],
+ exclude = [
+ {%- for pattern in glob.exclude %}
+ "{{ pattern }}",
+ {%- endfor %}
+ ],
+ )
\ No newline at end of file
diff --git a/crate_universe/src/rendering/templates/partials/starlark/selectable_dict.j2 b/crate_universe/src/rendering/templates/partials/starlark/selectable_dict.j2
new file mode 100644
index 0000000..8012cc7
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/starlark/selectable_dict.j2
@@ -0,0 +1,36 @@
+{%- set selectable = selectable | default(value=default_select_dict) %}
+{%- if selectable.selects | length -%}
+ selects.with_or({
+ {%- for cfg, map in selectable.selects %}
+ {%- if cfg in context.conditions and context.conditions[cfg] | length %}
+ # {{ cfg }}
+ (
+ {%- for triple in context.conditions[cfg] %}
+ "{{ platform_label(triple = triple) }}",
+ {%- endfor %}
+ ): {
+ {%- if selectable.common | length %}
+ {%- for key, val in selectable.common %}
+ "{{ key }}": "{{ val }}",
+ {%- endfor %}
+ {%- endif %}
+ {%- for key, val in map %}
+ "{{ key }}": "{{ val }}",
+ {%- endfor %}
+ },
+ {%- else %}
+ # No supported platform triples for cfg: '{{ cfg }}'
+ # Skipped dependencies: {{ map | json_encode| safe }}
+ {%- endif %}
+ {%- endfor %}
+ "//conditions:default": {},
+ })
+{%- else -%}
+ {
+ {%- if selectable.common | length %}
+ {%- for key, val in selectable.common %}
+ "{{ key }}": "{{ val }}",
+ {%- endfor %}
+ {%- endif %}
+ }
+{%- endif %}
\ No newline at end of file
diff --git a/crate_universe/src/rendering/templates/partials/starlark/selectable_list.j2 b/crate_universe/src/rendering/templates/partials/starlark/selectable_list.j2
new file mode 100644
index 0000000..2641713
--- /dev/null
+++ b/crate_universe/src/rendering/templates/partials/starlark/selectable_list.j2
@@ -0,0 +1,31 @@
+select_with_or({
+ {%- set selectable = selectable | default(value=default_select_list) %}
+ {%- for cfg, values in selectable.selects %}
+ # {{ cfg }}
+ {%- if cfg in context.conditions and context.conditions[cfg] | length %}
+ (
+ {%- for triple in context.conditions[cfg] %}
+ "{{ platform_label(triple = triple) }}",
+ {%- endfor %}
+ ): [
+ # Target Deps
+ {%- for val in values %}
+ "{{ val }}",
+ {%- endfor %}
+
+ # Common Deps
+ {%- for val in selectable.common %}
+ "{{ val }}",
+ {%- endfor %}
+ ],
+ {%- else %}
+ # No supported platform triples for cfg: '{{ cfg }}'
+ # Skipped dependencies: {{ values | json_encode | safe }}
+ {%- endif %}
+ {%- endfor %}
+ "//conditions:default": [
+ {%- for val in selectable.common %}
+ "{{ val }}",
+ {%- endfor %}
+ ],
+ })
\ No newline at end of file
diff --git a/crate_universe/src/rendering/templates/vendor_module.j2 b/crate_universe/src/rendering/templates/vendor_module.j2
new file mode 100644
index 0000000..74b031e
--- /dev/null
+++ b/crate_universe/src/rendering/templates/vendor_module.j2
@@ -0,0 +1,25 @@
+###############################################################################
+# @generated
+# This file is auto-generated by the cargo-bazel tool.
+#
+# DO NOT MODIFY: Local changes may be replaced in future executions.
+###############################################################################
+"""Rules for defining repositories for remote `crates_vendor` repositories"""
+
+load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")
+
+# buildifier: disable=bzl-visibility
+load("@rules_rust//crate_universe/private:crates_vendor.bzl", "crates_vendor_remote_repository")
+
+# buildifier: disable=bzl-visibility
+load("{{ crates_module_label(file="defs.bzl") }}", _crate_repositories = "crate_repositories")
+
+def crate_repositories():
+ maybe(
+ crates_vendor_remote_repository,
+ name = "{{ repository_name }}",
+ build_file = Label("{{ crates_module_label(file="BUILD.bazel") }}"),
+ defs_module = Label("{{ crates_module_label(file="defs.bzl") }}"),
+ )
+
+ _crate_repositories()
diff --git a/crate_universe/src/splicing.rs b/crate_universe/src/splicing.rs
new file mode 100644
index 0000000..0de1daa
--- /dev/null
+++ b/crate_universe/src/splicing.rs
@@ -0,0 +1,495 @@
+//! This module is responsible for finding a Cargo workspace
+
+pub(crate) mod cargo_config;
+mod splicer;
+
+use std::collections::{BTreeMap, BTreeSet, HashMap};
+use std::convert::TryFrom;
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::str::FromStr;
+
+use anyhow::{bail, Context, Result};
+use cargo_toml::Manifest;
+use hex::ToHex;
+use serde::{Deserialize, Serialize};
+
+use crate::config::CrateId;
+use crate::metadata::LockGenerator;
+use crate::utils::starlark::Label;
+
+use self::cargo_config::CargoConfig;
+pub use self::splicer::*;
+
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct ExtraManifestInfo {
+ // The path to a Cargo Manifest
+ pub manifest: PathBuf,
+
+ // The URL where the manifest's package can be downloaded
+ pub url: String,
+
+ // The Sha256 checksum of the downloaded package located at `url`.
+ pub sha256: String,
+}
+
+type DirectPackageManifest = BTreeMap<String, cargo_toml::DependencyDetail>;
+
+#[derive(Debug, Default, Serialize, Deserialize, Clone)]
+#[serde(deny_unknown_fields)]
+pub struct SplicingManifest {
+ /// A set of all packages directly written to the rule
+ pub direct_packages: DirectPackageManifest,
+
+ /// A mapping of manifest paths to the labels representing them
+ pub manifests: BTreeMap<PathBuf, Label>,
+
+ /// The path of a Cargo config file
+ pub cargo_config: Option<PathBuf>,
+
+ /// The Cargo resolver version to use for splicing
+ pub resolver_version: cargo_toml::Resolver,
+}
+
+impl FromStr for SplicingManifest {
+ type Err = serde_json::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ serde_json::from_str(s)
+ }
+}
+
+impl SplicingManifest {
+ pub fn try_from_path<T: AsRef<Path>>(path: T) -> Result<Self> {
+ let content = fs::read_to_string(path.as_ref())?;
+ Self::from_str(&content).context("Failed to load SplicingManifest")
+ }
+
+ pub fn absoulutize(self, relative_to: &Path) -> Self {
+ let Self {
+ manifests,
+ cargo_config,
+ ..
+ } = self;
+
+ // Ensure manifests all have absolute paths
+ let manifests = manifests
+ .into_iter()
+ .map(|(path, label)| {
+ if !path.is_absolute() {
+ let path = relative_to.join(path);
+ (path, label)
+ } else {
+ (path, label)
+ }
+ })
+ .collect();
+
+ // Ensure the cargo config is located at an absolute path
+ let cargo_config = cargo_config.map(|path| {
+ if !path.is_absolute() {
+ relative_to.join(path)
+ } else {
+ path
+ }
+ });
+
+ Self {
+ manifests,
+ cargo_config,
+ ..self
+ }
+ }
+}
+
+#[derive(Debug, Serialize, Default)]
+pub struct SplicingMetadata {
+ /// A set of all packages directly written to the rule
+ pub direct_packages: DirectPackageManifest,
+
+ /// A mapping of manifest paths to the labels representing them
+ pub manifests: BTreeMap<Label, cargo_toml::Manifest>,
+
+ /// The path of a Cargo config file
+ pub cargo_config: Option<CargoConfig>,
+}
+
+impl TryFrom<SplicingManifest> for SplicingMetadata {
+ type Error = anyhow::Error;
+
+ fn try_from(value: SplicingManifest) -> Result<Self, Self::Error> {
+ let direct_packages = value.direct_packages;
+
+ let manifests = value
+ .manifests
+ .into_iter()
+ .map(|(path, label)| {
+ let manifest = cargo_toml::Manifest::from_path(&path)
+ .with_context(|| format!("Failed to load manifest '{}'", path.display()))?;
+
+ Ok((label, manifest))
+ })
+ .collect::<Result<BTreeMap<Label, Manifest>>>()?;
+
+ let cargo_config = match value.cargo_config {
+ Some(path) => Some(
+ CargoConfig::try_from_path(&path)
+ .with_context(|| format!("Failed to load cargo config '{}'", path.display()))?,
+ ),
+ None => None,
+ };
+
+ Ok(Self {
+ direct_packages,
+ manifests,
+ cargo_config,
+ })
+ }
+}
+
+/// A collection of information required for reproducible "extra workspace members".
+#[derive(Debug, Default, Serialize, Deserialize)]
+#[serde(deny_unknown_fields)]
+pub struct ExtraManifestsManifest {
+ pub manifests: Vec<ExtraManifestInfo>,
+}
+
+impl FromStr for ExtraManifestsManifest {
+ type Err = serde_json::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ serde_json::from_str(s)
+ }
+}
+
+impl ExtraManifestsManifest {
+ pub fn try_from_path<T: AsRef<Path>>(path: T) -> Result<Self> {
+ let content = fs::read_to_string(path.as_ref())?;
+ Self::from_str(&content).context("Failed to load ExtraManifestsManifest")
+ }
+
+ pub fn absoulutize(self) -> Self {
+ self
+ }
+}
+
+#[derive(Debug, Default, Serialize, Deserialize, Clone)]
+pub struct SourceInfo {
+ /// A url where to a `.crate` file.
+ pub url: String,
+
+ /// The `.crate` file's sha256 checksum.
+ pub sha256: String,
+}
+
+/// Information about the Cargo workspace relative to the Bazel workspace
+#[derive(Debug, Default, Serialize, Deserialize)]
+pub struct WorkspaceMetadata {
+ /// A mapping of crates to information about where their source can be downloaded
+ #[serde(serialize_with = "toml::ser::tables_last")]
+ pub sources: BTreeMap<CrateId, SourceInfo>,
+
+ /// The path from the root of a Bazel workspace to the root of the Cargo workspace
+ pub workspace_prefix: Option<String>,
+
+ /// Paths from the root of a Bazel workspace to a Cargo package
+ #[serde(serialize_with = "toml::ser::tables_last")]
+ pub package_prefixes: BTreeMap<String, String>,
+}
+
+impl TryFrom<toml::Value> for WorkspaceMetadata {
+ type Error = anyhow::Error;
+
+ fn try_from(value: toml::Value) -> Result<Self, Self::Error> {
+ match value.get("cargo-bazel") {
+ Some(v) => v
+ .to_owned()
+ .try_into()
+ .context("Failed to deserialize toml value"),
+ None => bail!("cargo-bazel workspace metadata not found"),
+ }
+ }
+}
+
+impl TryFrom<serde_json::Value> for WorkspaceMetadata {
+ type Error = anyhow::Error;
+
+ fn try_from(value: serde_json::Value) -> Result<Self, Self::Error> {
+ match value.get("cargo-bazel") {
+ Some(value) => {
+ serde_json::from_value(value.to_owned()).context("Faield to deserialize json value")
+ }
+ None => bail!("cargo-bazel workspace metadata not found"),
+ }
+ }
+}
+
+impl WorkspaceMetadata {
+ fn new(
+ splicing_manifest: &SplicingManifest,
+ extra_manifests_manifest: &ExtraManifestsManifest,
+ injected_manifests: HashMap<&PathBuf, String>,
+ ) -> Result<Self> {
+ let mut sources = BTreeMap::new();
+
+ for config in extra_manifests_manifest.manifests.iter() {
+ let package = match read_manifest(&config.manifest) {
+ Ok(manifest) => match manifest.package {
+ Some(pkg) => pkg,
+ None => continue,
+ },
+ Err(e) => return Err(e),
+ };
+
+ let id = CrateId::new(package.name, package.version);
+ let info = SourceInfo {
+ url: config.url.clone(),
+ sha256: config.sha256.clone(),
+ };
+
+ sources.insert(id, info);
+ }
+
+ let mut package_prefixes: BTreeMap<String, String> = injected_manifests
+ .iter()
+ .filter_map(|(original_manifest, cargo_pkg_name)| {
+ let label = match splicing_manifest.manifests.get(*original_manifest) {
+ Some(v) => v,
+ None => return None,
+ };
+
+ let package = match &label.package {
+ Some(pkg) => PathBuf::from(pkg),
+ None => return None,
+ };
+
+ let prefix = package.to_string_lossy().to_string();
+
+ Some((cargo_pkg_name.clone(), prefix))
+ })
+ .collect();
+
+        // It is invalid for toml maps to use empty strings as keys. In this case
+        // the empty key is expected to be the root package. If the root package
+        // has a prefix, then all other packages will as well (even if no other
+        // manifest represents them). The value is then saved as a separate value.
+ let workspace_prefix = package_prefixes.remove("");
+
+ let package_prefixes = package_prefixes
+ .into_iter()
+ .map(|(k, v)| {
+ let prefix_path = PathBuf::from(v);
+ let prefix = prefix_path.parent().unwrap();
+ (k, prefix.to_string_lossy().to_string())
+ })
+ .collect();
+
+ Ok(Self {
+ sources,
+ workspace_prefix,
+ package_prefixes,
+ })
+ }
+
+ pub fn write_registry_urls(
+ lockfile: &cargo_lock::Lockfile,
+ manifest_path: &SplicedManifest,
+ ) -> Result<()> {
+ let mut manifest = read_manifest(manifest_path.as_path_buf())?;
+
+ let mut workspace_metaata = WorkspaceMetadata::try_from(
+ manifest
+ .workspace
+ .as_ref()
+ .unwrap()
+ .metadata
+ .as_ref()
+ .unwrap()
+ .clone(),
+ )?;
+
+        // Locate all packages sourced from a registry
+ let pkg_sources: Vec<&cargo_lock::Package> = lockfile
+ .packages
+ .iter()
+ .filter(|pkg| pkg.source.is_some())
+ .filter(|pkg| pkg.source.as_ref().unwrap().is_registry())
+ .collect();
+
+ // Collect a unique set of index urls
+ let index_urls: BTreeSet<String> = pkg_sources
+ .iter()
+ .map(|pkg| pkg.source.as_ref().unwrap().url().to_string())
+ .collect();
+
+ // Load the cargo config
+ let cargo_config = {
+ // Note that this path must match the one defined in `splicing::setup_cargo_config`
+ let config_path = manifest_path
+ .as_path_buf()
+ .parent()
+ .unwrap()
+ .join(".cargo")
+ .join("config.toml");
+
+ if config_path.exists() {
+ Some(CargoConfig::try_from_path(&config_path)?)
+ } else {
+ None
+ }
+ };
+
+ // Load each index for easy access
+ let crate_indexes = index_urls
+ .into_iter()
+ .map(|url| {
+ let index = {
+                        // Ensure the correct registry is mapped based on the given Cargo config.
+ let index_url = if let Some(config) = &cargo_config {
+ if let Some(source) = config.get_source_from_url(&url) {
+ if let Some(replace_with) = &source.replace_with {
+ if let Some(replacement) = config.get_registry_index_url_by_name(replace_with) {
+ replacement
+ } else {
+ bail!("Tried to replace registry {} with registry named {} but didn't have metadata about the replacement", url, replace_with);
+ }
+ } else {
+ &url
+ }
+ } else {
+ &url
+ }
+ } else {
+ &url
+ };
+
+ // Load the index for the current url
+ let index = crates_index::Index::from_url(index_url)
+ .with_context(|| format!("Failed to load index for url: {}", index_url))?;
+
+ // Ensure each index has a valid index config
+ index.index_config().with_context(|| {
+ format!("`config.json` not found in index: {}", index_url)
+ })?;
+
+ index
+ };
+
+ Ok((url, index))
+ })
+ .collect::<Result<BTreeMap<String, crates_index::Index>>>()
+ .context("Failed to locate crate indexes")?;
+
+        // Get the download URL of each package based on its registry url.
+ let additional_sources = pkg_sources
+ .iter()
+ .filter_map(|pkg| {
+ let source_id = pkg.source.as_ref().unwrap();
+ let index = &crate_indexes[&source_id.url().to_string()];
+ let index_config = index.index_config().unwrap();
+
+ index.crate_(pkg.name.as_str()).map(|crate_idx| {
+ crate_idx
+ .versions()
+ .iter()
+ .find(|v| v.version() == pkg.version.to_string())
+ .and_then(|v| {
+ v.download_url(&index_config).map(|url| {
+ let crate_id =
+ CrateId::new(v.name().to_owned(), v.version().to_owned());
+ let sha256 = pkg
+ .checksum
+ .as_ref()
+ .and_then(|sum| {
+ sum.as_sha256().map(|sum| sum.encode_hex::<String>())
+ })
+ .unwrap_or_else(|| v.checksum().encode_hex::<String>());
+ let source_info = SourceInfo { url, sha256 };
+ (crate_id, source_info)
+ })
+ })
+ })
+ })
+ .flatten();
+
+ workspace_metaata.sources.extend(additional_sources);
+ workspace_metaata.inject_into(&mut manifest)?;
+
+ write_root_manifest(manifest_path.as_path_buf(), manifest)?;
+
+ Ok(())
+ }
+
+ fn inject_into(&self, manifest: &mut Manifest) -> Result<()> {
+ let metadata_value = toml::Value::try_from(self)?;
+ let mut workspace = manifest.workspace.as_mut().unwrap();
+
+ match &mut workspace.metadata {
+ Some(data) => match data.as_table_mut() {
+ Some(map) => {
+ map.insert("cargo-bazel".to_owned(), metadata_value);
+ }
+ None => bail!("The metadata field is always expected to be a table"),
+ },
+ None => {
+ let mut table = toml::map::Map::new();
+ table.insert("cargo-bazel".to_owned(), metadata_value);
+ workspace.metadata = Some(toml::Value::Table(table))
+ }
+ }
+
+ Ok(())
+ }
+}
+
+#[derive(Debug)]
+pub enum SplicedManifest {
+ Workspace(PathBuf),
+ Package(PathBuf),
+ MultiPackage(PathBuf),
+}
+
+impl SplicedManifest {
+ pub fn as_path_buf(&self) -> &PathBuf {
+ match self {
+ SplicedManifest::Workspace(p) => p,
+ SplicedManifest::Package(p) => p,
+ SplicedManifest::MultiPackage(p) => p,
+ }
+ }
+}
+
+pub fn read_manifest(manifest: &Path) -> Result<Manifest> {
+ let content = fs::read_to_string(manifest)?;
+ cargo_toml::Manifest::from_str(content.as_str()).context("Failed to deserialize manifest")
+}
+
+pub fn generate_lockfile(
+ manifest_path: &SplicedManifest,
+ existing_lock: &Option<PathBuf>,
+ cargo_bin: &Path,
+ rustc_bin: &Path,
+) -> Result<cargo_lock::Lockfile> {
+ let manifest_dir = manifest_path
+ .as_path_buf()
+ .parent()
+ .expect("Every manifest should be contained in a parent directory");
+
+ let root_lockfile_path = manifest_dir.join("Cargo.lock");
+
+    // Remove the file so it's not overwritten if it happens to be a symlink.
+ if root_lockfile_path.exists() {
+ fs::remove_file(&root_lockfile_path)?;
+ }
+
+ // Generate the new lockfile
+ let lockfile = LockGenerator::new(PathBuf::from(cargo_bin), PathBuf::from(rustc_bin))
+ .generate(manifest_path.as_path_buf(), existing_lock)?;
+
+ // Write the lockfile to disk
+ if !root_lockfile_path.exists() {
+ bail!("Failed to generate Cargo.lock file")
+ }
+
+ Ok(lockfile)
+}
diff --git a/crate_universe/src/splicing/cargo_config.rs b/crate_universe/src/splicing/cargo_config.rs
new file mode 100644
index 0000000..ffa42b7
--- /dev/null
+++ b/crate_universe/src/splicing/cargo_config.rs
@@ -0,0 +1,244 @@
+//! Tools for parsing [Cargo configuration](https://doc.rust-lang.org/cargo/reference/config.html) files
+
+use std::collections::BTreeMap;
+use std::fs;
+use std::path::Path;
+use std::str::FromStr;
+
+use anyhow::Result;
+use serde::{Deserialize, Serialize};
+
+/// The [`[registry]`](https://doc.rust-lang.org/cargo/reference/config.html#registry)
+/// table controls the default registry used when one is not specified.
+#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
+pub struct Registry {
+ /// name of the default registry
+ pub default: String,
+
+ /// authentication token for crates.io
+ pub token: Option<String>,
+}
+
+/// The [`[source]`](https://doc.rust-lang.org/cargo/reference/config.html#source)
+/// table defines the registry sources available.
+#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
+pub struct Source {
+ /// replace this source with the given named source
+ #[serde(rename = "replace-with")]
+ pub replace_with: Option<String>,
+
+ /// URL to a registry source
+ #[serde(default = "default_registry_url")]
+ pub registry: String,
+}
+
+/// This is the default registry url per what's defined by Cargo.
+fn default_registry_url() -> String {
+ "https://github.com/rust-lang/crates.io-index".to_owned()
+}
+
+#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
+/// registries other than crates.io
+pub struct AdditionalRegistry {
+ /// URL of the registry index
+ pub index: String,
+
+ /// authentication token for the registry
+ pub token: Option<String>,
+}
+
+/// A subset of a Cargo configuration file. The schema here is only what
+/// is required for parsing registry information.
+/// See [cargo docs](https://doc.rust-lang.org/cargo/reference/config.html#configuration-format)
+/// for more details.
+#[derive(Debug, Deserialize, Serialize, PartialEq, Eq)]
+pub struct CargoConfig {
+ /// registries other than crates.io
+ #[serde(default = "default_registries")]
+ pub registries: BTreeMap<String, AdditionalRegistry>,
+
+ #[serde(default = "default_registry")]
+ pub registry: Registry,
+
+ /// source definition and replacement
+ #[serde(default = "BTreeMap::new")]
+ pub source: BTreeMap<String, Source>,
+}
+
+/// Each Cargo config is expected to have a default `crates-io` registry.
+fn default_registries() -> BTreeMap<String, AdditionalRegistry> {
+ let mut registries = BTreeMap::new();
+ registries.insert(
+ "crates-io".to_owned(),
+ AdditionalRegistry {
+ index: default_registry_url(),
+ token: None,
+ },
+ );
+ registries
+}
+
+/// Each Cargo config has a default registry for `crates.io`.
+fn default_registry() -> Registry {
+ Registry {
+ default: "crates-io".to_owned(),
+ token: None,
+ }
+}
+
+impl Default for CargoConfig {
+ fn default() -> Self {
+ let registries = default_registries();
+ let registry = default_registry();
+ let source = Default::default();
+
+ Self {
+ registries,
+ registry,
+ source,
+ }
+ }
+}
+
+impl FromStr for CargoConfig {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let incoming: CargoConfig = toml::from_str(s)?;
+ let mut config = Self::default();
+ config.registries.extend(incoming.registries);
+ config.source.extend(incoming.source);
+ config.registry = incoming.registry;
+ Ok(config)
+ }
+}
+
+impl CargoConfig {
+    /// Load a Cargo config from a path to a file on disk.
+ pub fn try_from_path(path: &Path) -> Result<Self> {
+ let content = fs::read_to_string(path)?;
+ Self::from_str(&content)
+ }
+
+    /// Look up a registry [Source] by its url.
+ pub fn get_source_from_url(&self, url: &str) -> Option<&Source> {
+ self.source.values().find(|v| v.registry == url)
+ }
+
+ pub fn get_registry_index_url_by_name(&self, name: &str) -> Option<&str> {
+ if let Some(registry) = self.registries.get(name) {
+ Some(®istry.index)
+ } else if let Some(source) = self.source.get(name) {
+ Some(&source.registry)
+ } else {
+ None
+ }
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ use std::fs;
+
+ #[test]
+ fn registry_settings() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let config = temp_dir.as_ref().join("config.toml");
+
+ fs::write(&config, textwrap::dedent(
+ r##"
+ # Makes artifactory the default registry and saves passing --registry parameter
+ [registry]
+ default = "art-crates-remote"
+
+ [registries]
+ # Remote repository proxy in Artifactory (read-only)
+ art-crates-remote = { index = "https://artprod.mycompany/artifactory/git/cargo-remote.git" }
+
+ # Optional, use with --registry to publish to crates.io
+ crates-io = { index = "https://github.com/rust-lang/crates.io-index" }
+
+ [net]
+ git-fetch-with-cli = true
+ "##,
+ )).unwrap();
+
+ let config = CargoConfig::try_from_path(&config).unwrap();
+ assert_eq!(
+ config,
+ CargoConfig {
+ registries: BTreeMap::from([
+ (
+ "art-crates-remote".to_owned(),
+ AdditionalRegistry {
+ index: "https://artprod.mycompany/artifactory/git/cargo-remote.git"
+ .to_owned(),
+ token: None,
+ },
+ ),
+ (
+ "crates-io".to_owned(),
+ AdditionalRegistry {
+ index: "https://github.com/rust-lang/crates.io-index".to_owned(),
+ token: None,
+ },
+ ),
+ ]),
+ registry: Registry {
+ default: "art-crates-remote".to_owned(),
+ token: None,
+ },
+ source: BTreeMap::new(),
+ },
+ )
+ }
+
+ #[test]
+ fn registry_settings_get_index_url_by_name_from_source() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let config = temp_dir.as_ref().join("config.toml");
+
+ fs::write(&config, textwrap::dedent(
+ r##"
+ [registries]
+ art-crates-remote = { index = "https://artprod.mycompany/artifactory/git/cargo-remote.git" }
+
+ [source.crates-io]
+ replace-with = "some-mirror"
+
+ [source.some-mirror]
+ registry = "https://artmirror.mycompany/artifactory/cargo-mirror.git"
+ "##,
+ )).unwrap();
+
+ let config = CargoConfig::try_from_path(&config).unwrap();
+ assert_eq!(
+ config.get_registry_index_url_by_name("some-mirror"),
+ Some("https://artmirror.mycompany/artifactory/cargo-mirror.git"),
+ );
+ }
+
+ #[test]
+ fn registry_settings_get_index_url_by_name_from_registry() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let config = temp_dir.as_ref().join("config.toml");
+
+ fs::write(&config, textwrap::dedent(
+ r##"
+ [registries]
+ art-crates-remote = { index = "https://artprod.mycompany/artifactory/git/cargo-remote.git" }
+
+ [source.crates-io]
+ replace-with = "art-crates-remote"
+ "##,
+ )).unwrap();
+
+ let config = CargoConfig::try_from_path(&config).unwrap();
+ assert_eq!(
+ config.get_registry_index_url_by_name("art-crates-remote"),
+ Some("https://artprod.mycompany/artifactory/git/cargo-remote.git"),
+ );
+ }
+}
diff --git a/crate_universe/src/splicing/splicer.rs b/crate_universe/src/splicing/splicer.rs
new file mode 100644
index 0000000..5e3ef27
--- /dev/null
+++ b/crate_universe/src/splicing/splicer.rs
@@ -0,0 +1,1443 @@
+//! Utility for creating valid Cargo workspaces
+
+use std::collections::{BTreeSet, HashMap};
+use std::fs;
+use std::path::{Path, PathBuf};
+
+use anyhow::{bail, Context, Result};
+use cargo_toml::{Dependency, Manifest};
+
+use crate::config::CrateId;
+use crate::splicing::{SplicedManifest, SplicingManifest};
+use crate::utils::starlark::Label;
+
+use super::{
+ read_manifest, DirectPackageManifest, ExtraManifestInfo, ExtraManifestsManifest,
+ WorkspaceMetadata,
+};
+
+/// The core splicer implementation. Each style of Bazel workspace should be represented
+/// here and a splicing implementation defined.
+pub enum SplicerKind<'a> {
+    /// Splice a manifest which is represented by a Cargo workspace
+    Workspace {
+        path: &'a PathBuf,
+        manifest: &'a Manifest,
+        splicing_manifest: &'a SplicingManifest,
+        extra_manifests_manifest: &'a ExtraManifestsManifest,
+    },
+    /// Splice a manifest for a single package. This includes cases where
+    /// packages were defined directly in Bazel.
+    Package {
+        path: &'a PathBuf,
+        manifest: &'a Manifest,
+        splicing_manifest: &'a SplicingManifest,
+        extra_manifests_manifest: &'a ExtraManifestsManifest,
+    },
+    /// Splice a manifest from multiple disjoint Cargo manifests.
+    MultiPackage {
+        manifests: &'a HashMap<PathBuf, Manifest>,
+        splicing_manifest: &'a SplicingManifest,
+        extra_manifests_manifest: &'a ExtraManifestsManifest,
+    },
+}
+
+/// A list of files or directories to ignore when symlinking
+const IGNORE_LIST: &[&str] = &[".git", "bazel-*", ".svn"];
+
+impl<'a> SplicerKind<'a> {
+    /// Select a splicing strategy for the given manifests, validating that any
+    /// Cargo workspace among them has exactly one root and that all of its
+    /// members are tracked.
+    pub fn new(
+        manifests: &'a HashMap<PathBuf, Manifest>,
+        splicing_manifest: &'a SplicingManifest,
+        extra_manifests_manifest: &'a ExtraManifestsManifest,
+    ) -> Result<Self> {
+        // First check for any workspaces in the provided manifests
+        let workspace_owned: HashMap<&PathBuf, &Manifest> = manifests
+            .iter()
+            .filter(|(_, manifest)| is_workspace_owned(manifest))
+            .collect();
+
+        let mut root_workspace_pair: Option<(&PathBuf, &Manifest)> = None;
+
+        if !workspace_owned.is_empty() {
+            // Filter for the root workspace manifest info
+            let (mut workspace_roots, workspace_packages): (
+                HashMap<&PathBuf, &Manifest>,
+                HashMap<&PathBuf, &Manifest>,
+            ) = workspace_owned
+                .clone()
+                .into_iter()
+                .partition(|(_, manifest)| is_workspace_root(manifest));
+
+            if workspace_roots.len() > 1 {
+                bail!("When splicing manifests, there can only be 1 root workspace manifest");
+            }
+
+            // Ensure all workspace owned manifests are members of the one workspace root
+            let (root_manifest_path, root_manifest) = workspace_roots.drain().last().unwrap();
+            let external_workspace_members: BTreeSet<String> = workspace_packages
+                .into_iter()
+                .filter(|(manifest_path, _)| {
+                    !is_workspace_member(root_manifest, root_manifest_path, manifest_path)
+                })
+                .map(|(path, _)| path.to_string_lossy().to_string())
+                .collect();
+
+            if !external_workspace_members.is_empty() {
+                bail!("A package was provided that appears to be a part of another workspace.\nworkspace root: '{}'\nexternal packages: {:#?}", root_manifest_path.display(), external_workspace_members)
+            }
+
+            // Ensure all workspace members are present for the given workspace
+            let workspace_members = root_manifest.workspace.as_ref().unwrap().members.clone();
+            let missing_manifests: BTreeSet<String> = workspace_members
+                .into_iter()
+                .filter(|member| {
+                    // Check for any members that are missing from the list of manifests
+                    !manifests.keys().any(|path| {
+                        let path_str = path.to_string_lossy().to_string();
+                        // Account for windows paths.
+                        let path_str = path_str.replace("\\", "/");
+                        // Workspace members are represented as directories.
+                        path_str.trim_end_matches("/Cargo.toml").ends_with(member)
+                    })
+                })
+                .filter_map(|path_str| {
+                    // UNWRAP: Safe because a Cargo.toml file must have a parent directory.
+                    let cargo_manifest_dir = root_manifest_path.parent().unwrap();
+                    let label = Label::from_absolute_path(
+                        &cargo_manifest_dir.join(path_str).join("Cargo.toml"),
+                    );
+                    match label {
+                        Ok(label) => Some(label.to_string()),
+                        Err(err) => {
+                            eprintln!("Failed to identify label for missing manifest: {}", err);
+                            None
+                        }
+                    }
+                })
+                .collect();
+
+            if !missing_manifests.is_empty() {
+                bail!("Some manifests are not being tracked. Please add the following labels to the `manifests` key: {:#?}", missing_manifests)
+            }
+
+            root_workspace_pair = Some((root_manifest_path, root_manifest));
+        }
+
+        if let Some((path, manifest)) = root_workspace_pair {
+            Ok(Self::Workspace {
+                path,
+                manifest,
+                splicing_manifest,
+                extra_manifests_manifest,
+            })
+        } else if manifests.len() == 1 {
+            let (path, manifest) = manifests.iter().last().unwrap();
+            Ok(Self::Package {
+                path,
+                manifest,
+                splicing_manifest,
+                extra_manifests_manifest,
+            })
+        } else {
+            Ok(Self::MultiPackage {
+                manifests,
+                splicing_manifest,
+                extra_manifests_manifest,
+            })
+        }
+    }
+
+    /// Performs splicing based on the current variant.
+    pub fn splice(&self, workspace_dir: &Path) -> Result<SplicedManifest> {
+        match self {
+            SplicerKind::Workspace {
+                path,
+                manifest,
+                splicing_manifest,
+                extra_manifests_manifest,
+            } => Self::splice_workspace(
+                workspace_dir,
+                path,
+                manifest,
+                splicing_manifest,
+                extra_manifests_manifest,
+            ),
+            SplicerKind::Package {
+                path,
+                manifest,
+                splicing_manifest,
+                extra_manifests_manifest,
+            } => Self::splice_package(
+                workspace_dir,
+                path,
+                manifest,
+                splicing_manifest,
+                extra_manifests_manifest,
+            ),
+            SplicerKind::MultiPackage {
+                manifests,
+                splicing_manifest,
+                extra_manifests_manifest,
+            } => Self::splice_multi_package(
+                workspace_dir,
+                manifests,
+                splicing_manifest,
+                extra_manifests_manifest,
+            ),
+        }
+    }
+
+    /// Implements splicing for manifests forming an existing Cargo workspace.
+    /// (Parameters are double references because they are destructured from
+    /// match arms over `&self` in [SplicerKind::splice].)
+    fn splice_workspace(
+        workspace_dir: &Path,
+        path: &&PathBuf,
+        manifest: &&Manifest,
+        splicing_manifest: &&SplicingManifest,
+        extra_manifests_manifest: &&ExtraManifestsManifest,
+    ) -> Result<SplicedManifest> {
+        let mut manifest = (*manifest).clone();
+        let manifest_dir = path
+            .parent()
+            .expect("Every manifest should havee a parent directory");
+
+        let extra_workspace_manifests =
+            Self::get_extra_workspace_manifests(&extra_manifests_manifest.manifests)?;
+
+        // Link the sources of the root manifest into the new workspace
+        symlink_roots(manifest_dir, workspace_dir, Some(IGNORE_LIST))?;
+
+        // Optionally install the cargo config after contents have been symlinked
+        Self::setup_cargo_config(&splicing_manifest.cargo_config, workspace_dir)?;
+
+        // Add additional workspace members to the new manifest
+        let mut installations = Self::inject_workspace_members(
+            &mut manifest,
+            &extra_workspace_manifests,
+            workspace_dir,
+        )?;
+
+        // Add any additional dependencies to the root package
+        Self::inject_direct_packages(&mut manifest, &splicing_manifest.direct_packages)?;
+
+        let root_manifest_path = workspace_dir.join("Cargo.toml");
+        installations.insert(path, String::new());
+
+        // Write the generated metadata to the manifest
+        let workspace_metadata =
+            WorkspaceMetadata::new(splicing_manifest, extra_manifests_manifest, installations)?;
+        workspace_metadata.inject_into(&mut manifest)?;
+
+        // Write the root manifest
+        write_root_manifest(&root_manifest_path, manifest)?;
+
+        Ok(SplicedManifest::Workspace(root_manifest_path))
+    }
+
+    /// Implements splicing for a single package manifest, synthesizing a
+    /// `[workspace]` section when the package does not define one.
+    fn splice_package(
+        workspace_dir: &Path,
+        path: &&PathBuf,
+        manifest: &&Manifest,
+        splicing_manifest: &&SplicingManifest,
+        extra_manifests_manifest: &&ExtraManifestsManifest,
+    ) -> Result<SplicedManifest> {
+        let manifest_dir = path
+            .parent()
+            .expect("Every manifest should havee a parent directory");
+
+        let extra_workspace_manifests =
+            Self::get_extra_workspace_manifests(&extra_manifests_manifest.manifests)?;
+
+        // Link the sources of the root manifest into the new workspace
+        symlink_roots(manifest_dir, workspace_dir, Some(IGNORE_LIST))?;
+
+        // Optionally install the cargo config after contents have been symlinked
+        Self::setup_cargo_config(&splicing_manifest.cargo_config, workspace_dir)?;
+
+        // Ensure the root package manifest has a populated `workspace` member
+        let mut manifest = (*manifest).clone();
+        if manifest.workspace.is_none() {
+            manifest.workspace =
+                default_cargo_workspace_manifest(&splicing_manifest.resolver_version).workspace
+        }
+
+        // Add additional workspace members to the new manifest
+        let mut installations = Self::inject_workspace_members(
+            &mut manifest,
+            &extra_workspace_manifests,
+            workspace_dir,
+        )?;
+
+        // Add any additional dependencies to the root package
+        Self::inject_direct_packages(&mut manifest, &splicing_manifest.direct_packages)?;
+
+        let root_manifest_path = workspace_dir.join("Cargo.toml");
+        installations.insert(path, String::new());
+
+        // Write the generated metadata to the manifest
+        let workspace_metadata =
+            WorkspaceMetadata::new(splicing_manifest, extra_manifests_manifest, installations)?;
+        workspace_metadata.inject_into(&mut manifest)?;
+
+        // Write the root manifest
+        write_root_manifest(&root_manifest_path, manifest)?;
+
+        Ok(SplicedManifest::Package(root_manifest_path))
+    }
+
+    /// Implements splicing for multiple disjoint package manifests, generating
+    /// a fresh workspace manifest that contains them all as members.
+    fn splice_multi_package(
+        workspace_dir: &Path,
+        manifests: &&HashMap<PathBuf, Manifest>,
+        splicing_manifest: &&SplicingManifest,
+        extra_manifests_manifest: &&ExtraManifestsManifest,
+    ) -> Result<SplicedManifest> {
+        let mut manifest = default_cargo_workspace_manifest(&splicing_manifest.resolver_version);
+
+        // Optionally install a cargo config file into the workspace root.
+        Self::setup_cargo_config(&splicing_manifest.cargo_config, workspace_dir)?;
+
+        let extra_workspace_manifests =
+            Self::get_extra_workspace_manifests(&extra_manifests_manifest.manifests)?;
+
+        let manifests: HashMap<PathBuf, Manifest> = manifests
+            .iter()
+            .map(|(p, m)| (p.to_owned(), m.to_owned()))
+            .collect();
+
+        let all_manifests = manifests
+            .iter()
+            .chain(extra_workspace_manifests.iter())
+            .map(|(k, v)| (k.clone(), v.clone()))
+            .collect();
+
+        let installations =
+            Self::inject_workspace_members(&mut manifest, &all_manifests, workspace_dir)?;
+
+        // Write the generated metadata to the manifest
+        let workspace_metadata =
+            WorkspaceMetadata::new(splicing_manifest, extra_manifests_manifest, installations)?;
+        workspace_metadata.inject_into(&mut manifest)?;
+
+        // Add any additional dependencies to the root package
+        Self::inject_direct_packages(&mut manifest, &splicing_manifest.direct_packages)?;
+
+        // Write the root manifest
+        let root_manifest_path = workspace_dir.join("Cargo.toml");
+        write_root_manifest(&root_manifest_path, manifest)?;
+
+        Ok(SplicedManifest::MultiPackage(root_manifest_path))
+    }
+
+    /// Extract the set of extra workspace member manifests such that it matches
+    /// how other manifests are passed when creating a new [SplicerKind].
+    fn get_extra_workspace_manifests(
+        extra_manifests: &[ExtraManifestInfo],
+    ) -> Result<HashMap<PathBuf, Manifest>> {
+        extra_manifests
+            .iter()
+            .map(|config| match read_manifest(&config.manifest) {
+                Ok(manifest) => Ok((config.manifest.clone(), manifest)),
+                Err(err) => Err(err),
+            })
+            .collect()
+    }
+
+    /// A helper for installing Cargo config files into the spliced workspace while also
+    /// ensuring no other linked config file is available
+    fn setup_cargo_config(cargo_config_path: &Option<PathBuf>, workspace_dir: &Path) -> Result<()> {
+        // Make sure no other config files exist
+        for config in vec![
+            workspace_dir.join("config"),
+            workspace_dir.join("config.toml"),
+        ] {
+            if config.exists() {
+                fs::remove_file(&config).with_context(|| {
+                    format!(
+                        "Failed to delete existing cargo config: {}",
+                        config.display()
+                    )
+                })?;
+            }
+        }
+
+        // If the `.cargo` dir is a symlink, we'll need to relink it and ensure
+        // a Cargo config file is omitted
+        let dot_cargo_dir = workspace_dir.join(".cargo");
+        if dot_cargo_dir.exists() {
+            let is_symlink = dot_cargo_dir
+                .symlink_metadata()
+                .map(|m| m.file_type().is_symlink())
+                .unwrap_or(false);
+            if is_symlink {
+                let real_path = dot_cargo_dir.canonicalize()?;
+                remove_symlink(&dot_cargo_dir).with_context(|| {
+                    format!(
+                        "Failed to remove existing symlink {}",
+                        dot_cargo_dir.display()
+                    )
+                })?;
+                fs::create_dir(&dot_cargo_dir)?;
+                symlink_roots(&real_path, &dot_cargo_dir, Some(&["config", "config.toml"]))?;
+            } else {
+                for config in vec![
+                    dot_cargo_dir.join("config"),
+                    dot_cargo_dir.join("config.toml"),
+                ] {
+                    if config.exists() {
+                        fs::remove_file(&config)?;
+                    }
+                }
+            }
+        }
+
+        // Install the new config file after having removed all others
+        if let Some(cargo_config_path) = cargo_config_path {
+            if !dot_cargo_dir.exists() {
+                fs::create_dir_all(&dot_cargo_dir)?;
+            }
+
+            fs::copy(cargo_config_path, &dot_cargo_dir.join("config.toml"))?;
+        }
+
+        Ok(())
+    }
+
+    /// Update the newly generated manifest to include additional packages as
+    /// Cargo workspace members. Returns a map of the given manifest paths to
+    /// the package names installed into the workspace.
+    fn inject_workspace_members<'b>(
+        root_manifest: &mut Manifest,
+        manifests: &'b HashMap<PathBuf, Manifest>,
+        workspace_dir: &Path,
+    ) -> Result<HashMap<&'b PathBuf, String>> {
+        manifests
+            .iter()
+            .map(|(path, manifest)| {
+                let package_name = &manifest
+                    .package
+                    .as_ref()
+                    .expect("Each manifest should have a root package")
+                    .name;
+
+                root_manifest
+                    .workspace
+                    .as_mut()
+                    .expect("The root manifest is expected to always have a workspace")
+                    .members
+                    .push(package_name.clone());
+
+                let manifest_dir = path
+                    .parent()
+                    .expect("Every manifest should havee a parent directory");
+
+                let dest_package_dir = workspace_dir.join(package_name);
+
+                match symlink_roots(manifest_dir, &dest_package_dir, Some(IGNORE_LIST)) {
+                    Ok(_) => Ok((path, package_name.clone())),
+                    Err(e) => Err(e),
+                }
+            })
+            .collect()
+    }
+
+    /// Add user-declared direct dependencies to the root manifest, creating a
+    /// stub root package first if one does not already exist. Fails when a
+    /// direct dependency collides with an existing manifest dependency.
+    fn inject_direct_packages(
+        manifest: &mut Manifest,
+        direct_packages_manifest: &DirectPackageManifest,
+    ) -> Result<()> {
+        // Ensure there's a root package to satisfy Cargo requirements
+        if manifest.package.is_none() {
+            let new_manifest = default_cargo_package_manifest();
+            manifest.package = new_manifest.package;
+            if manifest.lib.is_none() {
+                manifest.lib = new_manifest.lib;
+            }
+        }
+
+        // Check for any duplicates
+        let duplicates: Vec<&String> = manifest
+            .dependencies
+            .keys()
+            .filter(|k| direct_packages_manifest.contains_key(*k))
+            .collect();
+        if !duplicates.is_empty() {
+            bail!(
+                "Duplications detected between manifest dependencies and direct dependencies: {:?}",
+                duplicates
+            )
+        }
+
+        // Add the dependencies
+        for (name, details) in direct_packages_manifest.iter() {
+            manifest.dependencies.insert(
+                name.clone(),
+                cargo_toml::Dependency::Detailed(details.clone()),
+            );
+        }
+
+        Ok(())
+    }
+}
+
+/// Inputs required to splice a set of manifests into one Cargo workspace.
+pub struct Splicer {
+    // Directory in which the spliced workspace is assembled.
+    workspace_dir: PathBuf,
+    // Parsed manifests keyed by their on-disk paths.
+    manifests: HashMap<PathBuf, Manifest>,
+    splicing_manifest: SplicingManifest,
+    extra_manifests_manifest: ExtraManifestsManifest,
+}
+
+impl Splicer {
+    /// Create a new [Splicer], eagerly reading every manifest listed in the
+    /// splicing manifest so load errors surface before splicing begins.
+    pub fn new(
+        workspace_dir: PathBuf,
+        splicing_manifest: SplicingManifest,
+        extra_manifests_manifest: ExtraManifestsManifest,
+    ) -> Result<Self> {
+        // Load all manifests
+        let manifests = splicing_manifest
+            .manifests
+            .iter()
+            .map(|(path, _)| {
+                let m = read_manifest(path)
+                    .with_context(|| format!("Failed to read manifest at {}", path.display()))?;
+                Ok((path.clone(), m))
+            })
+            .collect::<Result<HashMap<PathBuf, Manifest>>>()?;
+
+        Ok(Self {
+            workspace_dir,
+            manifests,
+            splicing_manifest,
+            extra_manifests_manifest,
+        })
+    }
+
+    /// Build a new workspace root
+    pub fn splice_workspace(&self) -> Result<SplicedManifest> {
+        SplicerKind::new(
+            &self.manifests,
+            &self.splicing_manifest,
+            &self.extra_manifests_manifest,
+        )?
+        .splice(&self.workspace_dir)
+    }
+}
+
+// Name and version of the synthesized package that hosts direct,
+// Bazel-declared dependencies in the spliced workspace.
+const DEFAULT_SPLICING_PACKAGE_NAME: &str = "direct-cargo-bazel-deps";
+const DEFAULT_SPLICING_PACKAGE_VERSION: &str = "0.0.1";
+
+/// Build the default manifest for the synthesized `direct-cargo-bazel-deps`
+/// package used to host direct, Bazel-declared dependencies.
+pub fn default_cargo_package_manifest() -> cargo_toml::Manifest {
+    // The manifest carries a fake `[lib]` target so Cargo accepts the package
+    // despite it having no real sources. (The needless `let`-then-return
+    // binding was removed; clippy::let_and_return.)
+    cargo_toml::Manifest::from_str(
+        &toml::toml! {
+            [package]
+            name = DEFAULT_SPLICING_PACKAGE_NAME
+            version = DEFAULT_SPLICING_PACKAGE_VERSION
+            edition = "2018"
+
+            // A fake target used to satisfy requirements of Cargo.
+            [lib]
+            name = "direct_cargo_bazel_deps"
+            path = ".direct_cargo_bazel_deps.rs"
+        }
+        .to_string(),
+    )
+    .unwrap()
+}
+
+/// The [CrateId] of the synthesized splicing package.
+pub fn default_splicing_package_crate_id() -> CrateId {
+    let name = DEFAULT_SPLICING_PACKAGE_NAME.to_string();
+    let version = DEFAULT_SPLICING_PACKAGE_VERSION.to_string();
+    CrateId::new(name, version)
+}
+
+/// Produce a minimal Cargo workspace manifest pinned to the given resolver version.
+pub fn default_cargo_workspace_manifest(
+    resolver_version: &cargo_toml::Resolver,
+) -> cargo_toml::Manifest {
+    // A manifest is generated with a fake workspace member so the [cargo_toml::Manifest::Workspace]
+    // member is deserialized and is not `None`.
+    let mut manifest = cargo_toml::Manifest::from_str(&textwrap::dedent(&format!(
+        r#"
+        [workspace]
+        resolver = "{}"
+        "#,
+        resolver_version,
+    )))
+    .unwrap();
+
+    // Drop the temp workspace member
+    // NOTE(review): the TOML template above declares no `members`, so this
+    // `pop()` looks like a no-op — confirm whether a placeholder member was
+    // intended in the template.
+    manifest.workspace.as_mut().unwrap().members.pop();
+
+    manifest
+}
+
+/// Determine whether or not the manifest is a workspace root
+pub fn is_workspace_root(manifest: &Manifest) -> bool {
+    // Anything with any workspace data is considered a workspace
+    manifest.workspace.is_some()
+}
+
+/// Evaluates whether or not a manifest is considered a "workspace" manifest.
+/// See [Cargo workspaces](https://doc.rust-lang.org/cargo/reference/workspaces.html).
+pub fn is_workspace_owned(manifest: &Manifest) -> bool {
+    // A workspace root trivially qualifies; additionally, anything that
+    // contains path dependencies is also considered a workspace.
+    is_workspace_root(manifest)
+        || manifest
+            .dependencies
+            .values()
+            .any(|dep| matches!(dep, Dependency::Detailed(detail) if detail.path.is_some()))
+}
+
+/// Determines whether or not a particular manifest is a workspace member to a given root manifest
+pub fn is_workspace_member(
+    root_manifest: &Manifest,
+    root_manifest_path: &Path,
+    manifest_path: &Path,
+) -> bool {
+    // A manifest with no workspace section has no members at all.
+    let workspace = match root_manifest.workspace.as_ref() {
+        Some(workspace) => workspace,
+        None => return false,
+    };
+
+    let root_parent = root_manifest_path
+        .parent()
+        .expect("All manifest paths should have a parent");
+    let target_manifest = root_parent.join(manifest_path);
+
+    // Each member names a directory relative to the workspace root; the member
+    // matches when its `Cargo.toml` resolves to the candidate manifest path.
+    workspace
+        .members
+        .iter()
+        .map(|member| root_parent.join(member).join("Cargo.toml"))
+        .any(|candidate| candidate == target_manifest)
+}
+
+/// Serialize `manifest` to `path`, first deleting any existing file so a
+/// symlinked manifest is replaced rather than written through.
+pub fn write_root_manifest(path: &Path, manifest: cargo_toml::Manifest) -> Result<()> {
+    // Remove the file in case one exists already, preventing symlinked files
+    // from having their contents overwritten.
+    if path.exists() {
+        fs::remove_file(path)?;
+    }
+
+    // Ensure the directory exists
+    if let Some(parent) = path.parent() {
+        fs::create_dir_all(parent)?;
+    }
+
+    // TODO(https://gitlab.com/crates.rs/cargo_toml/-/issues/3)
+    let value = toml::Value::try_from(&manifest)?;
+    fs::write(path, toml::to_string(&value)?)
+        .context(format!("Failed to write manifest to {}", path.display()))
+}
+
+/// Create a symlink file on unix systems (files and directories use the
+/// same symlink primitive on unix).
+#[cfg(target_family = "unix")]
+fn symlink(src: &Path, dest: &Path) -> Result<(), std::io::Error> {
+    std::os::unix::fs::symlink(src, dest)
+}
+
+/// Create a symlink file on windows systems. Windows distinguishes between
+/// file and directory symlinks, so dispatch on the source's type.
+#[cfg(target_family = "windows")]
+fn symlink(src: &Path, dest: &Path) -> Result<(), std::io::Error> {
+    if src.is_dir() {
+        std::os::windows::fs::symlink_dir(src, dest)
+    } else {
+        std::os::windows::fs::symlink_file(src, dest)
+    }
+}
+
+/// Remove a symlink on unix systems (symlinks are removed like regular files).
+#[cfg(target_family = "unix")]
+fn remove_symlink(path: &Path) -> Result<(), std::io::Error> {
+    fs::remove_file(path)
+}
+
+/// Remove a symlink on windows systems (directory symlinks must be removed
+/// with `remove_dir`).
+#[cfg(target_family = "windows")]
+fn remove_symlink(path: &Path) -> Result<(), std::io::Error> {
+    if path.is_dir() {
+        fs::remove_dir(path)
+    } else {
+        fs::remove_file(path)
+    }
+}
+
+/// Symlinks the root contents of a source directory into a destination
+/// directory, skipping entries that match `ignore_list` (exact names or
+/// `prefix-*` glob patterns).
+pub fn symlink_roots(source: &Path, dest: &Path, ignore_list: Option<&[&str]>) -> Result<()> {
+    // Ensure the source exists and is a directory
+    if !source.is_dir() {
+        bail!("Source path is not a directory: {}", source.display());
+    }
+
+    // Only check if the dest is a directory if it already exists
+    if dest.exists() && !dest.is_dir() {
+        bail!("Dest path is not a directory: {}", dest.display());
+    }
+
+    fs::create_dir_all(dest)?;
+
+    // Link each directory entry from the source dir to the dest
+    for entry in (source.read_dir()?).flatten() {
+        let basename = entry.file_name();
+
+        // Ignore certain directories that may lead to confusion.
+        // BUG FIX: the previous implementation `continue`d inside the inner
+        // pattern loop, which only advanced to the next pattern and never
+        // skipped the entry — the ignore list was effectively a no-op. Compute
+        // the match first and skip the entry-loop iteration instead.
+        if let (Some(base_str), Some(list)) = (basename.to_str(), ignore_list) {
+            let is_ignored = list.iter().any(|item| {
+                // Handle optional glob patterns here. This allows us to ignore
+                // `bazel-*` patterns.
+                if item.ends_with('*') {
+                    base_str.starts_with(item.trim_end_matches('*'))
+                } else {
+                    *item == base_str
+                }
+            });
+            if is_ignored {
+                continue;
+            }
+        }
+
+        let link_src = source.join(&basename);
+        let link_dest = dest.join(&basename);
+        symlink(&link_src, &link_dest).context(format!(
+            "Failed to create symlink: {} -> {}",
+            link_src.display(),
+            link_dest.display()
+        ))?;
+    }
+
+    Ok(())
+}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ use std::fs;
+ use std::fs::File;
+ use std::str::FromStr;
+
+ use cargo_metadata::{MetadataCommand, PackageId};
+
+ use crate::splicing::ExtraManifestInfo;
+ use crate::utils::starlark::Label;
+
+    /// Clone and compare two items after calling `.sort()` on them.
+    /// (Both operands must be `Clone` and contain `Ord` elements.)
+    macro_rules! assert_sort_eq {
+        ($left:expr, $right:expr $(,)?) => {
+            let mut left = $left.clone();
+            left.sort();
+            let mut right = $right.clone();
+            right.sort();
+            assert_eq!(left, right);
+        };
+    }
+
+    /// Get cargo and rustc binaries the Bazel way, resolving the `CARGO` and
+    /// `RUSTC` environment variables through runfiles.
+    #[cfg(not(feature = "cargo"))]
+    fn get_cargo_and_rustc_paths() -> (PathBuf, PathBuf) {
+        let runfiles = runfiles::Runfiles::create().unwrap();
+        let cargo_path = runfiles.rlocation(concat!("rules_rust/", env!("CARGO")));
+        let rustc_path = runfiles.rlocation(concat!("rules_rust/", env!("RUSTC")));
+
+        (cargo_path, rustc_path)
+    }
+
+    /// Get cargo and rustc binaries the Cargo way (assume they are on `PATH`).
+    #[cfg(feature = "cargo")]
+    fn get_cargo_and_rustc_paths() -> (PathBuf, PathBuf) {
+        (PathBuf::from("cargo"), PathBuf::from("rustc"))
+    }
+
+    /// Run `cargo metadata` (offline) against `manifest_path` and parse the result.
+    fn generate_metadata(manifest_path: &Path) -> cargo_metadata::Metadata {
+        let manifest_dir = manifest_path.parent().unwrap_or_else(|| {
+            panic!(
+                "The given manifest has no parent directory: {}",
+                manifest_path.display()
+            )
+        });
+
+        let (cargo_path, rustc_path) = get_cargo_and_rustc_paths();
+
+        let output = MetadataCommand::new()
+            .cargo_path(cargo_path)
+            // Cargo detects config files based on `pwd` when running so
+            // to ensure user provided Cargo config files are used, it's
+            // critical to set the working directory to the manifest dir.
+            .current_dir(manifest_dir)
+            .manifest_path(manifest_path)
+            .other_options(["--offline".to_owned()])
+            .cargo_command()
+            .env("RUSTC", rustc_path)
+            .output()
+            .unwrap();
+
+        if !output.status.success() {
+            eprintln!("{}", String::from_utf8_lossy(&output.stderr));
+            assert!(output.status.success());
+        }
+
+        let stdout = String::from_utf8(output.stdout).unwrap();
+
+        // The command must emit at least one line of JSON output.
+        // (Simplified from a needless `find(..).ok_or(..).is_ok()` chain.)
+        assert!(stdout.lines().any(|line| line.starts_with('{')));
+
+        MetadataCommand::parse(stdout).unwrap()
+    }
+
+    /// Write a minimal `[package]` manifest named `name` to `path` (creating
+    /// parent directories as needed) and return the parsed manifest.
+    fn mock_cargo_toml(path: &Path, name: &str) -> cargo_toml::Manifest {
+        let manifest = cargo_toml::Manifest::from_str(&textwrap::dedent(&format!(
+            r#"
+            [package]
+            name = "{}"
+            version = "0.0.1"
+
+            [lib]
+            path = "lib.rs"
+            "#,
+            name
+        )))
+        .unwrap();
+
+        fs::create_dir_all(path.parent().unwrap()).unwrap();
+        fs::write(path, toml::to_string(&manifest).unwrap()).unwrap();
+
+        manifest
+    }
+
+    /// Build an [ExtraManifestsManifest] containing a single mocked
+    /// `extra_pkg` manifest written under `cache_dir`.
+    fn mock_extra_manifest_digest(cache_dir: &Path) -> ExtraManifestsManifest {
+        ExtraManifestsManifest {
+            manifests: vec![{
+                let manifest_path = cache_dir.join("extra_pkg").join("Cargo.toml");
+                mock_cargo_toml(&manifest_path, "extra_pkg");
+
+                ExtraManifestInfo {
+                    manifest: manifest_path,
+                    url: "https://crates.io/".to_owned(),
+                    sha256: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
+                        .to_owned(),
+                }
+            }],
+        }
+    }
+
+    /// This json object is tightly coupled to [mock_extra_manifest_digest].
+    /// `include_extra_member` toggles the `extra_pkg` source entry and
+    /// `workspace_prefix` optionally injects a `workspace_prefix` key.
+    fn mock_workspace_metadata(
+        include_extra_member: bool,
+        workspace_prefix: Option<&str>,
+    ) -> serde_json::Value {
+        let mut obj = if include_extra_member {
+            serde_json::json!({
+                "cargo-bazel": {
+                    "package_prefixes": {},
+                    "sources": {
+                        "extra_pkg 0.0.1": {
+                            "sha256": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+                            "url": "https://crates.io/"
+                        }
+                    }
+                }
+            })
+        } else {
+            serde_json::json!({
+                "cargo-bazel": {
+                    "package_prefixes": {},
+                    "sources": {}
+                }
+            })
+        };
+        if let Some(workspace_prefix) = workspace_prefix {
+            obj.as_object_mut().unwrap()["cargo-bazel"]
+                .as_object_mut()
+                .unwrap()
+                .insert("workspace_prefix".to_owned(), workspace_prefix.into());
+        }
+        obj
+    }
+
+    /// Build a [SplicingManifest] whose Cargo workspace root lives in a
+    /// `root_pkg/` subdirectory of the returned Bazel workspace dir.
+    fn mock_splicing_manifest_with_workspace() -> (SplicingManifest, tempfile::TempDir) {
+        let mut splicing_manifest = SplicingManifest::default();
+        let cache_dir = tempfile::tempdir().unwrap();
+
+        // Write workspace members
+        for pkg in &["sub_pkg_a", "sub_pkg_b"] {
+            let manifest_path = cache_dir
+                .as_ref()
+                .join("root_pkg")
+                .join(pkg)
+                .join("Cargo.toml");
+            mock_cargo_toml(&manifest_path, pkg);
+
+            splicing_manifest.manifests.insert(
+                manifest_path,
+                Label::from_str(&format!("//{}:Cargo.toml", pkg)).unwrap(),
+            );
+        }
+
+        // Create the root package with a workspace definition
+        let manifest: cargo_toml::Manifest = toml::toml! {
+            [workspace]
+            members = [
+                "sub_pkg_a",
+                "sub_pkg_b",
+            ]
+            [package]
+            name = "root_pkg"
+            version = "0.0.1"
+
+            [lib]
+            path = "lib.rs"
+        }
+        .try_into()
+        .unwrap();
+
+        let workspace_root = cache_dir.as_ref();
+        {
+            File::create(workspace_root.join("WORKSPACE.bazel")).unwrap();
+        }
+        let root_pkg = workspace_root.join("root_pkg");
+        let manifest_path = root_pkg.join("Cargo.toml");
+        fs::create_dir_all(&manifest_path.parent().unwrap()).unwrap();
+        fs::write(&manifest_path, toml::to_string(&manifest).unwrap()).unwrap();
+
+        let sub_pkg_a = root_pkg.join("sub_pkg_a");
+        let sub_pkg_b = root_pkg.join("sub_pkg_b");
+        {
+            fs::create_dir_all(&sub_pkg_a).unwrap();
+            File::create(sub_pkg_a.join("BUILD.bazel")).unwrap();
+
+            fs::create_dir_all(&sub_pkg_b).unwrap();
+            File::create(sub_pkg_b.join("BUILD.bazel")).unwrap();
+        }
+
+        splicing_manifest.manifests.insert(
+            manifest_path,
+            Label::from_str("//pkg_root:Cargo.toml").unwrap(),
+        );
+
+        (splicing_manifest, cache_dir)
+    }
+
+    /// Build a [SplicingManifest] whose Cargo workspace root coincides with
+    /// the Bazel workspace root itself.
+    fn mock_splicing_manifest_with_workspace_in_root() -> (SplicingManifest, tempfile::TempDir) {
+        let mut splicing_manifest = SplicingManifest::default();
+        let cache_dir = tempfile::tempdir().unwrap();
+
+        // Write workspace members
+        for pkg in &["sub_pkg_a", "sub_pkg_b"] {
+            let manifest_path = cache_dir.as_ref().join(pkg).join("Cargo.toml");
+            mock_cargo_toml(&manifest_path, pkg);
+
+            splicing_manifest.manifests.insert(
+                manifest_path,
+                Label::from_str(&format!("//{}:Cargo.toml", pkg)).unwrap(),
+            );
+        }
+
+        // Create the root package with a workspace definition
+        let manifest: cargo_toml::Manifest = toml::toml! {
+            [workspace]
+            members = [
+                "sub_pkg_a",
+                "sub_pkg_b",
+            ]
+            [package]
+            name = "root_pkg"
+            version = "0.0.1"
+
+            [lib]
+            path = "lib.rs"
+        }
+        .try_into()
+        .unwrap();
+
+        let workspace_root = cache_dir.as_ref();
+        {
+            File::create(workspace_root.join("WORKSPACE.bazel")).unwrap();
+        }
+        let manifest_path = workspace_root.join("Cargo.toml");
+        fs::create_dir_all(&manifest_path.parent().unwrap()).unwrap();
+        fs::write(&manifest_path, toml::to_string(&manifest).unwrap()).unwrap();
+
+        let sub_pkg_a = workspace_root.join("sub_pkg_a");
+        let sub_pkg_b = workspace_root.join("sub_pkg_b");
+        {
+            fs::create_dir_all(&sub_pkg_a).unwrap();
+            File::create(sub_pkg_a.join("BUILD.bazel")).unwrap();
+
+            fs::create_dir_all(&sub_pkg_b).unwrap();
+            File::create(sub_pkg_b.join("BUILD.bazel")).unwrap();
+        }
+
+        splicing_manifest
+            .manifests
+            .insert(manifest_path, Label::from_str("//:Cargo.toml").unwrap());
+
+        (splicing_manifest, cache_dir)
+    }
+
+    /// Build a [SplicingManifest] containing a single standalone package.
+    fn mock_splicing_manifest_with_package() -> (SplicingManifest, tempfile::TempDir) {
+        let mut splicing_manifest = SplicingManifest::default();
+        let cache_dir = tempfile::tempdir().unwrap();
+
+        // Add an additional package
+        let manifest_path = cache_dir.as_ref().join("root_pkg").join("Cargo.toml");
+        mock_cargo_toml(&manifest_path, "root_pkg");
+        splicing_manifest
+            .manifests
+            .insert(manifest_path, Label::from_str("//:Cargo.toml").unwrap());
+
+        (splicing_manifest, cache_dir)
+    }
+
+    /// Build a [SplicingManifest] containing several disjoint packages.
+    fn mock_splicing_manifest_with_multi_package() -> (SplicingManifest, tempfile::TempDir) {
+        let mut splicing_manifest = SplicingManifest::default();
+        let cache_dir = tempfile::tempdir().unwrap();
+
+        // Add an additional package
+        for pkg in &["pkg_a", "pkg_b", "pkg_c"] {
+            let manifest_path = cache_dir.as_ref().join(pkg).join("Cargo.toml");
+            mock_cargo_toml(&manifest_path, pkg);
+            splicing_manifest
+                .manifests
+                .insert(manifest_path, Label::from_str("//:Cargo.toml").unwrap());
+        }
+
+        (splicing_manifest, cache_dir)
+    }
+
+    /// Construct the [PackageId] Cargo would assign to a path dependency
+    /// located at `workspace_root` (when `is_root`) or `workspace_root/name`.
+    fn new_package_id(name: &str, workspace_root: &Path, is_root: bool) -> PackageId {
+        let mut workspace_root = workspace_root.display().to_string();
+
+        // On windows, make sure we normalize the path to match what Cargo would
+        // otherwise use to populate metadata.
+        if cfg!(target_os = "windows") {
+            // `char` pattern avoids an unnecessary substring search
+            // (clippy::single_char_pattern).
+            workspace_root = format!("/{}", workspace_root.replace('\\', "/"))
+        };
+
+        if is_root {
+            PackageId {
+                repr: format!("{} 0.0.1 (path+file://{})", name, workspace_root),
+            }
+        } else {
+            PackageId {
+                repr: format!("{} 0.0.1 (path+file://{}/{})", name, workspace_root, name),
+            }
+        }
+    }
+
+    /// Splicing a workspace nested under `root_pkg/` must produce metadata
+    /// listing the root package and both sub packages as members.
+    #[test]
+    fn splice_workspace() {
+        // BUG FIX: this test previously called
+        // `mock_splicing_manifest_with_workspace_in_root`, making it a byte-for-byte
+        // duplicate of `splice_workspace_in_root` and leaving the nested-workspace
+        // fixture unexercised on the happy path.
+        let (splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace();
+
+        // Splice the workspace
+        let workspace_root = tempfile::tempdir().unwrap();
+        let workspace_manifest = Splicer::new(
+            workspace_root.as_ref().to_path_buf(),
+            splicing_manifest,
+            ExtraManifestsManifest::default(),
+        )
+        .unwrap()
+        .splice_workspace()
+        .unwrap();
+
+        // Ensure metadata is valid
+        let metadata = generate_metadata(workspace_manifest.as_path_buf());
+        assert_sort_eq!(
+            metadata.workspace_members,
+            vec![
+                new_package_id("sub_pkg_a", workspace_root.as_ref(), false),
+                new_package_id("sub_pkg_b", workspace_root.as_ref(), false),
+                new_package_id("root_pkg", workspace_root.as_ref(), true),
+            ]
+        );
+
+        // Ensure the workspace metadata annotations are populated
+        assert_eq!(
+            metadata.workspace_metadata,
+            mock_workspace_metadata(false, None)
+        );
+
+        // Ensure lockfile was successfully spliced
+        cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
+    }
+
+    /// Like `splice_workspace`, but the Cargo workspace root coincides with
+    /// the Bazel workspace root.
+    #[test]
+    fn splice_workspace_in_root() {
+        let (splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace_in_root();
+
+        // Splice the workspace
+        let workspace_root = tempfile::tempdir().unwrap();
+        let workspace_manifest = Splicer::new(
+            workspace_root.as_ref().to_path_buf(),
+            splicing_manifest,
+            ExtraManifestsManifest::default(),
+        )
+        .unwrap()
+        .splice_workspace()
+        .unwrap();
+
+        // Ensure metadata is valid
+        let metadata = generate_metadata(workspace_manifest.as_path_buf());
+        assert_sort_eq!(
+            metadata.workspace_members,
+            vec![
+                new_package_id("sub_pkg_a", workspace_root.as_ref(), false),
+                new_package_id("sub_pkg_b", workspace_root.as_ref(), false),
+                new_package_id("root_pkg", workspace_root.as_ref(), true),
+            ]
+        );
+
+        // Ensure the workspace metadata annotations are populated
+        assert_eq!(
+            metadata.workspace_metadata,
+            mock_workspace_metadata(false, None)
+        );
+
+        // Ensure lockfile was successfully spliced
+        cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
+    }
+
+    /// A workspace whose member manifests are absent from the splicing
+    /// manifest must be rejected with an error naming the missing labels.
+    #[test]
+    fn splice_workspace_report_missing_members() {
+        let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace();
+
+        // Remove everything but the root manifest
+        splicing_manifest
+            .manifests
+            .retain(|_, label| *label == Label::from_str("//pkg_root:Cargo.toml").unwrap());
+        assert_eq!(splicing_manifest.manifests.len(), 1);
+
+        // Splice the workspace
+        let workspace_root = tempfile::tempdir().unwrap();
+        let workspace_manifest = Splicer::new(
+            workspace_root.as_ref().to_path_buf(),
+            splicing_manifest,
+            ExtraManifestsManifest::default(),
+        )
+        .unwrap()
+        .splice_workspace();
+
+        assert!(workspace_manifest.is_err());
+
+        // Ensure both the missing manifests are mentioned in the error string
+        let err_str = format!("{:?}", &workspace_manifest);
+        assert!(
+            err_str.contains("Some manifests are not being tracked")
+                && err_str.contains("//root_pkg/sub_pkg_a:Cargo.toml")
+                && err_str.contains("//root_pkg/sub_pkg_b:Cargo.toml")
+        );
+    }
+
    // A manifest that belongs to a *different* (external) Cargo workspace must
    // be rejected, and the error should point at the offending manifest path.
    #[test]
    fn splice_workspace_report_external_workspace_members() {
        let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace();

        // Add a new package from an existing external workspace
        let external_workspace_root = tempfile::tempdir().unwrap();
        let external_manifest = external_workspace_root
            .as_ref()
            .join("external_workspace_member")
            .join("Cargo.toml");
        fs::create_dir_all(external_manifest.parent().unwrap()).unwrap();
        fs::write(
            &external_manifest,
            &textwrap::dedent(
                r#"
                [package]
                name = "external_workspace_member"
                version = "0.0.1"

                [lib]
                path = "lib.rs"

                [dependencies]
                neighbor = { path = "../neighbor" }
                "#,
            ),
        )
        .unwrap();

        splicing_manifest.manifests.insert(
            external_manifest.clone(),
            Label::from_str("@remote_dep//external_workspace_member:Cargo.toml").unwrap(),
        );

        // Splice the workspace
        let workspace_root = tempfile::tempdir().unwrap();
        let workspace_manifest = Splicer::new(
            workspace_root.as_ref().to_path_buf(),
            splicing_manifest,
            ExtraManifestsManifest::default(),
        )
        .unwrap()
        .splice_workspace();

        assert!(workspace_manifest.is_err());

        // Ensure the external workspace member's manifest path is mentioned
        // in the error string
        let err_str = format!("{:?}", &workspace_manifest);
        let bytes_str = format!("{:?}", external_manifest.to_string_lossy());
        assert!(
            err_str
                .contains("A package was provided that appears to be a part of another workspace.")
                && err_str.contains(&bytes_str)
        );
    }
+
    // Splicing a single stand-alone package should yield a workspace whose
    // only member is that package.
    #[test]
    fn splice_package() {
        let (splicing_manifest, _cache_dir) = mock_splicing_manifest_with_package();

        // Splice the workspace
        let workspace_root = tempfile::tempdir().unwrap();
        let workspace_manifest = Splicer::new(
            workspace_root.as_ref().to_path_buf(),
            splicing_manifest,
            ExtraManifestsManifest::default(),
        )
        .unwrap()
        .splice_workspace()
        .unwrap();

        // Ensure metadata is valid
        let metadata = generate_metadata(workspace_manifest.as_path_buf());
        assert_sort_eq!(
            metadata.workspace_members,
            vec![new_package_id("root_pkg", workspace_root.as_ref(), true)]
        );

        // Ensure the workspace metadata annotations are not populated
        assert_eq!(
            metadata.workspace_metadata,
            mock_workspace_metadata(false, None)
        );

        // Ensure lockfile was successfully spliced
        cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
    }
+
    // Splicing several disjoint packages should synthesize a workspace around
    // them, defaulting the dependency resolver to version 1.
    #[test]
    fn splice_multi_package() {
        let (splicing_manifest, _cache_dir) = mock_splicing_manifest_with_multi_package();

        // Splice the workspace
        let workspace_root = tempfile::tempdir().unwrap();
        let workspace_manifest = Splicer::new(
            workspace_root.as_ref().to_path_buf(),
            splicing_manifest,
            ExtraManifestsManifest::default(),
        )
        .unwrap()
        .splice_workspace()
        .unwrap();

        // Check the default resolver version
        let cargo_manifest = cargo_toml::Manifest::from_str(
            &fs::read_to_string(workspace_manifest.as_path_buf()).unwrap(),
        )
        .unwrap();
        assert!(cargo_manifest.workspace.is_some());
        assert_eq!(
            cargo_manifest.workspace.unwrap().resolver,
            Some(cargo_toml::Resolver::V1)
        );

        // Ensure metadata is valid
        let metadata = generate_metadata(workspace_manifest.as_path_buf());
        assert_sort_eq!(
            metadata.workspace_members,
            vec![
                new_package_id("pkg_a", workspace_root.as_ref(), false),
                new_package_id("pkg_b", workspace_root.as_ref(), false),
                new_package_id("pkg_c", workspace_root.as_ref(), false),
                // Multi package renderings always add a root package
                new_package_id("direct-cargo-bazel-deps", workspace_root.as_ref(), true),
            ]
        );

        // Ensure the workspace metadata annotations are populated
        assert_eq!(
            metadata.workspace_metadata,
            mock_workspace_metadata(false, None)
        );

        // Ensure lockfile was successfully spliced
        cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
    }
+
    // Same as `splice_multi_package`, but with an explicitly requested
    // resolver version (V2) which must survive into the generated manifest.
    #[test]
    fn splice_multi_package_with_resolver() {
        let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_multi_package();

        // Update the resolver version
        splicing_manifest.resolver_version = cargo_toml::Resolver::V2;

        // Splice the workspace
        let workspace_root = tempfile::tempdir().unwrap();
        let workspace_manifest = Splicer::new(
            workspace_root.as_ref().to_path_buf(),
            splicing_manifest,
            ExtraManifestsManifest::default(),
        )
        .unwrap()
        .splice_workspace()
        .unwrap();

        // Check the specified resolver version
        let cargo_manifest = cargo_toml::Manifest::from_str(
            &fs::read_to_string(workspace_manifest.as_path_buf()).unwrap(),
        )
        .unwrap();
        assert!(cargo_manifest.workspace.is_some());
        assert_eq!(
            cargo_manifest.workspace.unwrap().resolver,
            Some(cargo_toml::Resolver::V2)
        );

        // Ensure metadata is valid
        let metadata = generate_metadata(workspace_manifest.as_path_buf());
        assert_sort_eq!(
            metadata.workspace_members,
            vec![
                new_package_id("pkg_a", workspace_root.as_ref(), false),
                new_package_id("pkg_b", workspace_root.as_ref(), false),
                new_package_id("pkg_c", workspace_root.as_ref(), false),
                // Multi package renderings always add a root package
                new_package_id("direct-cargo-bazel-deps", workspace_root.as_ref(), true),
            ]
        );

        // Ensure the workspace metadata annotations are populated
        assert_eq!(
            metadata.workspace_metadata,
            mock_workspace_metadata(false, None)
        );

        // Ensure lockfile was successfully spliced
        cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
    }
+
    // An extra (out-of-band) manifest added to a single-package splice should
    // become an additional workspace member alongside the root package.
    #[test]
    fn extra_workspace_member_with_package() {
        let (splicing_manifest, cache_dir) = mock_splicing_manifest_with_package();

        // Add the extra workspace member
        let extra_manifests_manifest = mock_extra_manifest_digest(cache_dir.as_ref());

        // Splice the workspace
        let workspace_root = tempfile::tempdir().unwrap();
        let workspace_manifest = Splicer::new(
            workspace_root.as_ref().to_path_buf(),
            splicing_manifest,
            extra_manifests_manifest,
        )
        .unwrap()
        .splice_workspace()
        .unwrap();

        // Ensure metadata is valid
        let metadata = generate_metadata(workspace_manifest.as_path_buf());
        assert_sort_eq!(
            metadata.workspace_members,
            vec![
                new_package_id("extra_pkg", workspace_root.as_ref(), false),
                new_package_id("root_pkg", workspace_root.as_ref(), true),
            ]
        );

        // Ensure the workspace metadata annotations are populated
        assert_eq!(
            metadata.workspace_metadata,
            mock_workspace_metadata(true, None)
        );

        // Ensure lockfile was successfully spliced
        cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
    }
+
    // An extra manifest added to a full-workspace splice should join the
    // existing workspace members.
    #[test]
    fn extra_workspace_member_with_workspace() {
        let (splicing_manifest, cache_dir) = mock_splicing_manifest_with_workspace();

        // Add the extra workspace member
        let extra_manifests_manifest = mock_extra_manifest_digest(cache_dir.as_ref());

        // Splice the workspace
        let workspace_root = tempfile::tempdir().unwrap();
        let workspace_manifest = Splicer::new(
            workspace_root.as_ref().to_path_buf(),
            splicing_manifest,
            extra_manifests_manifest,
        )
        .unwrap()
        .splice_workspace()
        .unwrap();

        // Ensure metadata is valid
        let metadata = generate_metadata(workspace_manifest.as_path_buf());
        assert_sort_eq!(
            metadata.workspace_members,
            vec![
                new_package_id("sub_pkg_a", workspace_root.as_ref(), false),
                new_package_id("sub_pkg_b", workspace_root.as_ref(), false),
                new_package_id("extra_pkg", workspace_root.as_ref(), false),
                new_package_id("root_pkg", workspace_root.as_ref(), true),
            ]
        );

        // Ensure the workspace metadata annotations are populated
        assert_eq!(
            metadata.workspace_metadata,
            mock_workspace_metadata(true, Some("pkg_root"))
        );

        // Ensure lockfile was successfully spliced
        cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
    }
+
    // An extra manifest added to a multi-package splice should join the
    // synthesized workspace alongside the generated root package.
    #[test]
    fn extra_workspace_member_with_multi_package() {
        let (splicing_manifest, cache_dir) = mock_splicing_manifest_with_multi_package();

        // Add the extra workspace member
        let extra_manifests_manifest = mock_extra_manifest_digest(cache_dir.as_ref());

        // Splice the workspace
        let workspace_root = tempfile::tempdir().unwrap();
        let workspace_manifest = Splicer::new(
            workspace_root.as_ref().to_path_buf(),
            splicing_manifest,
            extra_manifests_manifest,
        )
        .unwrap()
        .splice_workspace()
        .unwrap();

        // Ensure metadata is valid
        let metadata = generate_metadata(workspace_manifest.as_path_buf());
        assert_sort_eq!(
            metadata.workspace_members,
            vec![
                new_package_id("pkg_a", workspace_root.as_ref(), false),
                new_package_id("pkg_b", workspace_root.as_ref(), false),
                new_package_id("pkg_c", workspace_root.as_ref(), false),
                new_package_id("extra_pkg", workspace_root.as_ref(), false),
                // Multi package renderings always add a root package
                new_package_id("direct-cargo-bazel-deps", workspace_root.as_ref(), true),
            ]
        );

        // Ensure the workspace metadata annotations are populated
        assert_eq!(
            metadata.workspace_metadata,
            mock_workspace_metadata(true, None)
        );

        // Ensure lockfile was successfully spliced
        cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
    }
+}
diff --git a/crate_universe/src/test.rs b/crate_universe/src/test.rs
new file mode 100644
index 0000000..5b67351
--- /dev/null
+++ b/crate_universe/src/test.rs
@@ -0,0 +1,153 @@
+//! A module containing common test helpers
+
/// Builds a minimal, fully-populated `cargo_metadata::Package` for use as a
/// test fixture. The values mimic a crates.io registry package; fields that
/// tests do not inspect are left empty or null.
pub fn mock_cargo_metadata_package() -> cargo_metadata::Package {
    serde_json::from_value(serde_json::json!({
        "name": "mock-pkg",
        "version": "3.3.3",
        "id": "mock-pkg 3.3.3 (registry+https://github.com/rust-lang/crates.io-index)",
        "license": "Unlicense/MIT",
        "license_file": null,
        "description": "Fast multiple substring searching.",
        "source": "registry+https://github.com/rust-lang/crates.io-index",
        "dependencies": [],
        "targets": [],
        "features": {},
        "manifest_path": "/tmp/mock-pkg-3.3.3/Cargo.toml",
        "metadata": null,
        "publish": null,
        "authors": [],
        "categories": [],
        "keywords": [],
        "readme": "README.md",
        "repository": "",
        "homepage": "",
        "documentation": null,
        "edition": "2021",
        "links": null,
        "default_run": null
    }))
    .unwrap()
}
+
/// Builds a minimal `cargo_lock::Package` test fixture matching the package
/// produced by [`mock_cargo_metadata_package`] (same name/version/source).
pub fn mock_cargo_lock_package() -> cargo_lock::Package {
    toml::from_str(&textwrap::dedent(
        r#"
        name = "mock-pkg"
        version = "3.3.3"
        source = "registry+https://github.com/rust-lang/crates.io-index"
        checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
        dependencies = []
        "#,
    ))
    .unwrap()
}
+
+pub mod metadata {
+ pub fn alias() -> cargo_metadata::Metadata {
+ serde_json::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/aliases/metadata.json"
+ )))
+ .unwrap()
+ }
+
+ pub fn build_scripts() -> cargo_metadata::Metadata {
+ serde_json::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/build_scripts/metadata.json"
+ )))
+ .unwrap()
+ }
+
+ pub fn crate_types() -> cargo_metadata::Metadata {
+ serde_json::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/crate_types/metadata.json"
+ )))
+ .unwrap()
+ }
+
+ pub fn multi_cfg_dep() -> cargo_metadata::Metadata {
+ serde_json::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/multi_cfg_dep/metadata.json"
+ )))
+ .unwrap()
+ }
+
+ pub fn no_deps() -> cargo_metadata::Metadata {
+ serde_json::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/no_deps/metadata.json"
+ )))
+ .unwrap()
+ }
+
+ pub fn common() -> cargo_metadata::Metadata {
+ serde_json::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/common/metadata.json"
+ )))
+ .unwrap()
+ }
+
+ pub fn git_repos() -> cargo_metadata::Metadata {
+ serde_json::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/git_repos/metadata.json"
+ )))
+ .unwrap()
+ }
+}
+
+pub mod lockfile {
+ use std::str::FromStr;
+
+ pub fn alias() -> cargo_lock::Lockfile {
+ cargo_lock::Lockfile::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/aliases/Cargo.lock"
+ )))
+ .unwrap()
+ }
+
+ pub fn build_scripts() -> cargo_lock::Lockfile {
+ cargo_lock::Lockfile::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/build_scripts/Cargo.lock"
+ )))
+ .unwrap()
+ }
+
+ pub fn crate_types() -> cargo_lock::Lockfile {
+ cargo_lock::Lockfile::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/crate_types/Cargo.lock"
+ )))
+ .unwrap()
+ }
+
+ pub fn no_deps() -> cargo_lock::Lockfile {
+ cargo_lock::Lockfile::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/no_deps/Cargo.lock"
+ )))
+ .unwrap()
+ }
+
+ pub fn common() -> cargo_lock::Lockfile {
+ cargo_lock::Lockfile::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/common/Cargo.lock"
+ )))
+ .unwrap()
+ }
+
+ pub fn git_repos() -> cargo_lock::Lockfile {
+ cargo_lock::Lockfile::from_str(include_str!(concat!(
+ env!("CARGO_MANIFEST_DIR"),
+ "/test_data/metadata/git_repos/Cargo.lock"
+ )))
+ .unwrap()
+ }
+}
diff --git a/crate_universe/src/utils.rs b/crate_universe/src/utils.rs
new file mode 100644
index 0000000..8953c81
--- /dev/null
+++ b/crate_universe/src/utils.rs
@@ -0,0 +1,15 @@
+//! Common utilities
+
+pub mod starlark;
+
+/// Convert a string into a valid crate module name by applying transforms to invalid characters
+pub fn sanitize_module_name(name: &str) -> String {
+ name.replace('-', "_")
+}
+
+/// Some character which may be present in version IDs are not valid
+/// in Bazel repository names. This converts invalid characters. See
+/// [RepositoryName.java](https://github.com/bazelbuild/bazel/blob/4.0.0/src/main/java/com/google/devtools/build/lib/cmdline/RepositoryName.java#L42)
+pub fn sanitize_repository_name(name: &str) -> String {
+ name.replace('+', "-")
+}
diff --git a/crate_universe/src/utils/starlark.rs b/crate_universe/src/utils/starlark.rs
new file mode 100644
index 0000000..57007b7
--- /dev/null
+++ b/crate_universe/src/utils/starlark.rs
@@ -0,0 +1,12 @@
+//! A module for representations of starlark constructs
+
+mod glob;
+mod label;
+mod select;
+
+pub use glob::*;
+pub use label::*;
+pub use select::*;
+
/// A list attribute whose entries may be gated behind `select()` configurations.
pub type SelectStringList = SelectList<String>;
/// A dict attribute whose entries may be gated behind `select()` configurations.
pub type SelectStringDict = SelectDict<String>;
diff --git a/crate_universe/src/utils/starlark/glob.rs b/crate_universe/src/utils/starlark/glob.rs
new file mode 100644
index 0000000..23b17a5
--- /dev/null
+++ b/crate_universe/src/utils/starlark/glob.rs
@@ -0,0 +1,16 @@
+use serde::{Deserialize, Serialize};
+
/// Representation of a Bazel `glob()` expression as rendered into BUILD files.
#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Clone)]
pub struct Glob {
    /// Patterns to match.
    pub include: Vec<String>,
    /// Patterns to remove from the matched set.
    pub exclude: Vec<String>,
}
+
+impl Glob {
+ pub fn new_rust_srcs() -> Self {
+ Self {
+ include: vec!["**/*.rs".to_owned()],
+ ..Default::default()
+ }
+ }
+}
diff --git a/crate_universe/src/utils/starlark/label.rs b/crate_universe/src/utils/starlark/label.rs
new file mode 100644
index 0000000..1716944
--- /dev/null
+++ b/crate_universe/src/utils/starlark/label.rs
@@ -0,0 +1,317 @@
+use std::fmt::{self, Display};
+use std::path::Path;
+use std::str::FromStr;
+
+use anyhow::{anyhow, bail, Context, Result};
+use regex::Regex;
+use serde::de::Visitor;
+use serde::{Deserialize, Serialize, Serializer};
+
/// A parsed Bazel label, e.g. `@repo//package/path:target`.
#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct Label {
    /// Repository name without the leading `@`; `None` for the main workspace.
    pub repository: Option<String>,
    /// `/`-delimited package path; `None` for the workspace root package.
    pub package: Option<String>,
    /// The target name.
    pub target: String,
}
+
+impl FromStr for Label {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let re = Regex::new(r"^(@[\w\d\-_\.]*)?/{0,2}([\w\d\-_\./]+)?:?([\+\w\d\-_\./]+)$")?;
+ let cap = re
+ .captures(s)
+ .with_context(|| format!("Failed to parse label from string: {}", s))?;
+
+ let repository = cap
+ .get(1)
+ .map(|m| m.as_str().trim_start_matches('@').to_owned());
+ let package = cap.get(2).map(|m| m.as_str().to_owned());
+ let mut target = cap.get(3).map(|m| m.as_str().to_owned());
+
+ if target.is_none() {
+ if let Some(pkg) = &package {
+ target = Some(pkg.clone());
+ } else if let Some(repo) = &repository {
+ target = Some(repo.clone())
+ } else {
+ bail!("The label is missing a label")
+ }
+ }
+
+ // The target should be set at this point
+ let target = target.unwrap();
+
+ Ok(Self {
+ repository,
+ package,
+ target,
+ })
+ }
+}
+
+impl Display for Label {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ let mut label = String::new();
+
+ // Add the repository
+ if let Some(repo) = &self.repository {
+ label = format!("@{}", repo);
+ }
+
+ // Add the package
+ if let Some(pkg) = &self.package {
+ label = format!("{}//{}", label, pkg);
+ }
+
+ write!(f, "{}:{}", &label, &self.target,)
+ }
+}
+
impl Label {
    /// Generates a label appropriate for the passed Path by walking the filesystem to identify its
    /// workspace and package.
    pub fn from_absolute_path(p: &Path) -> Result<Self, anyhow::Error> {
        // Walk up from the file's parent directory: the nearest ancestor
        // containing a BUILD(.bazel) file is the package root; the nearest
        // containing a WORKSPACE(.bazel) file is the workspace root. The loop
        // stops at the workspace, so the package must lie inside it.
        let mut workspace_root = None;
        let mut package_root = None;
        for ancestor in p.ancestors().skip(1) {
            if package_root.is_none()
                && (ancestor.join("BUILD").exists() || ancestor.join("BUILD.bazel").exists())
            {
                package_root = Some(ancestor);
            }
            if workspace_root.is_none()
                && (ancestor.join("WORKSPACE").exists()
                    || ancestor.join("WORKSPACE.bazel").exists())
            {
                workspace_root = Some(ancestor);
                break;
            }
        }
        match (workspace_root, package_root) {
            (Some(workspace_root), Some(package_root)) => {
                // These unwraps are safe by construction of the ancestors and prefix calls which set up these paths.
                let target = p.strip_prefix(package_root).unwrap();
                let workspace_relative = p.strip_prefix(workspace_root).unwrap();
                // The package path is the workspace-relative path with the
                // target's trailing components popped off.
                let mut package_path = workspace_relative.to_path_buf();
                for _ in target.components() {
                    package_path.pop();
                }

                // An empty package path means the package is the workspace root.
                let package = if package_path.components().count() > 0 {
                    Some(path_to_label_part(&package_path)?)
                } else {
                    None
                };
                let target = path_to_label_part(target)?;

                Ok(Label {
                    repository: None,
                    package,
                    target,
                })
            }
            (Some(_workspace_root), None) => {
                bail!(
                    "Could not identify package for path {}. Maybe you need to add a BUILD.bazel file.",
                    p.display()
                );
            }
            _ => {
                bail!("Could not identify workspace for path {}", p.display());
            }
        }
    }
}
+
+/// Converts a path to a forward-slash-delimited label-appropriate path string.
+fn path_to_label_part(path: &Path) -> Result<String, anyhow::Error> {
+ let components: Result<Vec<_>, _> = path
+ .components()
+ .map(|c| {
+ c.as_os_str().to_str().ok_or_else(|| {
+ anyhow!(
+ "Found non-UTF8 component turning path into label: {}",
+ path.display()
+ )
+ })
+ })
+ .collect();
+ Ok(components?.join("/"))
+}
+
impl Serialize for Label {
    /// Serializes the label as its canonical string form (see [`Label::repr`]).
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        serializer.serialize_str(&self.repr())
    }
}
+
+struct LabelVisitor;
+impl<'de> Visitor<'de> for LabelVisitor {
+ type Value = Label;
+
+ fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
+ formatter.write_str("Expected string value of `{name} {version}`.")
+ }
+
+ fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
+ where
+ E: serde::de::Error,
+ {
+ Label::from_str(v).map_err(E::custom)
+ }
+}
+
impl<'de> Deserialize<'de> for Label {
    /// Deserializes a label from its string form via [`FromStr`].
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        deserializer.deserialize_str(LabelVisitor)
    }
}
+
impl Label {
    /// Renders the label in its canonical `[@repo][//package]:target` string form.
    pub fn repr(&self) -> String {
        self.to_string()
    }
}
+
#[cfg(test)]
mod test {
    //! Unit tests for `Label` string parsing and filesystem-based inference.
    use super::*;
    use spectral::prelude::*;
    use std::fs::{create_dir_all, File};
    use tempfile::tempdir;

    #[test]
    fn full_label() {
        let label = Label::from_str("@repo//package/sub_package:target").unwrap();
        assert_eq!(label.repository.unwrap(), "repo");
        assert_eq!(label.package.unwrap(), "package/sub_package");
        assert_eq!(label.target, "target");
    }

    #[test]
    fn no_repository() {
        let label = Label::from_str("//package:target").unwrap();
        assert_eq!(label.repository, None);
        assert_eq!(label.package.unwrap(), "package");
        assert_eq!(label.target, "target");
    }

    #[test]
    fn no_slashes() {
        let label = Label::from_str("package:target").unwrap();
        assert_eq!(label.repository, None);
        assert_eq!(label.package.unwrap(), "package");
        assert_eq!(label.target, "target");
    }

    #[test]
    fn root_label() {
        let label = Label::from_str("@repo//:target").unwrap();
        assert_eq!(label.repository.unwrap(), "repo");
        assert_eq!(label.package, None);
        assert_eq!(label.target, "target");
    }

    #[test]
    fn root_label_no_repository() {
        let label = Label::from_str("//:target").unwrap();
        assert_eq!(label.repository, None);
        assert_eq!(label.package, None);
        assert_eq!(label.target, "target");
    }

    #[test]
    fn root_label_no_slashes() {
        let label = Label::from_str(":target").unwrap();
        assert_eq!(label.repository, None);
        assert_eq!(label.package, None);
        assert_eq!(label.target, "target");
    }

    #[test]
    fn full_label_with_slash_after_colon() {
        let label = Label::from_str("@repo//package/sub_package:subdir/target").unwrap();
        assert_eq!(label.repository.unwrap(), "repo");
        assert_eq!(label.package.unwrap(), "package/sub_package");
        assert_eq!(label.target, "subdir/target");
    }

    #[test]
    fn invalid_double_colon() {
        assert!(Label::from_str("::target").is_err());
    }

    #[test]
    fn invalid_double_at() {
        assert!(Label::from_str("@@repo//pkg:target").is_err());
    }

    #[test]
    #[ignore = "This currently fails. The Label parsing logic needs to be updated"]
    fn invalid_no_double_slash() {
        assert!(Label::from_str("@repo:target").is_err());
    }

    // Layout: <tmp>/WORKSPACE.bazel, <tmp>/parent/child/BUILD.bazel, and the
    // file at <tmp>/parent/child/grandchild/greatgrandchild — so the package
    // is `parent/child` and the target is the remainder of the path.
    #[test]
    fn from_absolute_path_exists() {
        let dir = tempdir().unwrap();
        let workspace = dir.path().join("WORKSPACE.bazel");
        let build_file = dir.path().join("parent").join("child").join("BUILD.bazel");
        let subdir = dir.path().join("parent").join("child").join("grandchild");
        let actual_file = subdir.join("greatgrandchild");
        create_dir_all(subdir).unwrap();
        {
            File::create(&workspace).unwrap();
            File::create(&build_file).unwrap();
            File::create(&actual_file).unwrap();
        }
        let label = Label::from_absolute_path(&actual_file).unwrap();
        assert_eq!(label.repository, None);
        assert_eq!(label.package.unwrap(), "parent/child");
        assert_eq!(label.target, "grandchild/greatgrandchild")
    }

    // No WORKSPACE file anywhere above the target file: expect an error.
    #[test]
    fn from_absolute_path_no_workspace() {
        let dir = tempdir().unwrap();
        let build_file = dir.path().join("parent").join("child").join("BUILD.bazel");
        let subdir = dir.path().join("parent").join("child").join("grandchild");
        let actual_file = subdir.join("greatgrandchild");
        create_dir_all(subdir).unwrap();
        {
            File::create(&build_file).unwrap();
            File::create(&actual_file).unwrap();
        }
        let err = Label::from_absolute_path(&actual_file)
            .unwrap_err()
            .to_string();
        assert_that(&err).contains("Could not identify workspace");
        assert_that(&err).contains(format!("{}", actual_file.display()).as_str());
    }

    // WORKSPACE exists but no BUILD file between it and the target: expect an
    // error suggesting a BUILD.bazel file be added.
    #[test]
    fn from_absolute_path_no_build_file() {
        let dir = tempdir().unwrap();
        let workspace = dir.path().join("WORKSPACE.bazel");
        let subdir = dir.path().join("parent").join("child").join("grandchild");
        let actual_file = subdir.join("greatgrandchild");
        create_dir_all(subdir).unwrap();
        {
            File::create(&workspace).unwrap();
            File::create(&actual_file).unwrap();
        }
        let err = Label::from_absolute_path(&actual_file)
            .unwrap_err()
            .to_string();
        assert_that(&err).contains("Could not identify package");
        assert_that(&err).contains("Maybe you need to add a BUILD.bazel file");
        assert_that(&err).contains(format!("{}", actual_file.display()).as_str());
    }
}
diff --git a/crate_universe/src/utils/starlark/select.rs b/crate_universe/src/utils/starlark/select.rs
new file mode 100644
index 0000000..4a8a3cc
--- /dev/null
+++ b/crate_universe/src/utils/starlark/select.rs
@@ -0,0 +1,166 @@
+use serde::{Deserialize, Serialize};
+use std::collections::{btree_set, BTreeMap, BTreeSet};
+use std::iter::once;
+
/// Conversion between selectable containers: maps every held value of type
/// `T` to type `U` while preserving the configuration structure.
pub trait SelectMap<T, U> {
    // A selectable should also implement a `map` function allowing one type of selectable
    // to be mutated into another. However, the approach I'm looking for requires GAT
    // (Generic Associated Types) which are not yet stable.
    // https://github.com/rust-lang/rust/issues/44265
    type Mapped;
    /// Consumes `self` and applies `func` to each contained value.
    fn map<F: Copy + Fn(T) -> U>(self, func: F) -> Self::Mapped;
}
+
/// Common behavior of containers rendered as starlark `select()` expressions.
pub trait Select<T> {
    /// Gather a list of all conditions currently set on the selectable. A conditional
    /// would be the key of the select statement. `None` represents the
    /// unconditional (common) branch.
    fn configurations(&self) -> BTreeSet<Option<&String>>;
}
+
/// A list rendered as a starlark `select()`: `common` values apply
/// unconditionally while each entry in `selects` applies only under its
/// configuration key.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Clone)]
pub struct SelectList<T: Ord> {
    common: BTreeSet<T>,
    selects: BTreeMap<String, BTreeSet<T>>,
}
+
+impl<T: Ord> Default for SelectList<T> {
+ fn default() -> Self {
+ Self {
+ common: BTreeSet::new(),
+ selects: BTreeMap::new(),
+ }
+ }
+}
+
+impl<T: Ord> SelectList<T> {
+ // TODO: This should probably be added to the [Select] trait
+ pub fn insert(&mut self, value: T, configuration: Option<String>) {
+ match configuration {
+ None => {
+ self.common.insert(value);
+ }
+ Some(cfg) => {
+ match self.selects.get_mut(&cfg) {
+ None => {
+ let mut set = BTreeSet::new();
+ set.insert(value);
+ self.selects.insert(cfg, set);
+ }
+ Some(set) => {
+ set.insert(value);
+ }
+ };
+ }
+ };
+ }
+
+ // TODO: This should probably be added to the [Select] trait
+ pub fn get_iter<'a>(&'a self, config: Option<&String>) -> Option<btree_set::Iter<T>> {
+ match config {
+ Some(conf) => self.selects.get(conf).map(|set| set.iter()),
+ None => Some(self.common.iter()),
+ }
+ }
+
+ /// Determine whether or not the select should be serialized
+ pub fn should_skip_serializing(&self) -> bool {
+ self.common.is_empty() && self.selects.is_empty()
+ }
+}
+
+impl<T: Ord> Select<T> for SelectList<T> {
+ fn configurations(&self) -> BTreeSet<Option<&String>> {
+ let configs = self.selects.keys().map(Some);
+ match self.common.is_empty() {
+ true => configs.collect(),
+ false => configs.chain(once(None)).collect(),
+ }
+ }
+}
+
impl<T: Ord, U: Ord> SelectMap<T, U> for SelectList<T> {
    type Mapped = SelectList<U>;

    /// Applies `func` to every value in the common set and in each
    /// configuration's set, preserving the configuration keys.
    fn map<F: Copy + Fn(T) -> U>(self, func: F) -> Self::Mapped {
        SelectList {
            common: self.common.into_iter().map(func).collect(),
            selects: self
                .selects
                .into_iter()
                .map(|(key, map)| (key, map.into_iter().map(func).collect()))
                .collect(),
        }
    }
}
+
/// A dict rendered as a starlark `select()`: `common` entries apply
/// unconditionally while each map in `selects` applies only under its
/// configuration key.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize, Clone)]
pub struct SelectDict<T: Ord> {
    common: BTreeMap<String, T>,
    selects: BTreeMap<String, BTreeMap<String, T>>,
}
+
+impl<T: Ord> Default for SelectDict<T> {
+ fn default() -> Self {
+ Self {
+ common: BTreeMap::new(),
+ selects: BTreeMap::new(),
+ }
+ }
+}
+
+impl<T: Ord> SelectDict<T> {
+ // TODO: This should probably be added to the [Select] trait
+ pub fn insert(&mut self, value: BTreeMap<String, T>, configuration: Option<String>) {
+ match configuration {
+ None => {
+ self.common.extend(value);
+ }
+ Some(cfg) => {
+ match self.selects.get_mut(&cfg) {
+ None => {
+ let mut set = BTreeMap::new();
+ set.extend(value);
+ self.selects.insert(cfg, set);
+ }
+ Some(set) => {
+ set.extend(value);
+ }
+ };
+ }
+ };
+ }
+
+ /// Determine whether or not the select should be serialized
+ pub fn should_skip_serializing(&self) -> bool {
+ self.common.is_empty() && self.selects.is_empty()
+ }
+}
+
+impl<T: Ord> Select<T> for SelectDict<T> {
+ fn configurations(&self) -> BTreeSet<Option<&String>> {
+ let configs = self.selects.keys().map(Some);
+ match self.common.is_empty() {
+ true => configs.collect(),
+ false => configs.chain(once(None)).collect(),
+ }
+ }
+}
+
impl<T: Ord, U: Ord> SelectMap<T, U> for SelectDict<T> {
    type Mapped = SelectDict<U>;

    /// Applies `func` to every value in the common map and in each
    /// configuration's map; all string keys are preserved unchanged.
    fn map<F: Copy + Fn(T) -> U>(self, func: F) -> Self::Mapped {
        SelectDict {
            common: self
                .common
                .into_iter()
                .map(|(key, val)| (key, func(val)))
                .collect(),
            selects: self
                .selects
                .into_iter()
                .map(|(key, map)| (key, map.into_iter().map(|(k, v)| (k, func(v))).collect()))
                .collect(),
        }
    }
}