Squashed 'third_party/rules_rust/' changes from bf59038cac..078c6908fc
078c6908fc add cc_common.link support for rust_library and rust_test (#1490)
c07aef0287 Skip supplying rpaths on Fuchsia (#1511)
6ee7c80bdb Propagate rustc_env{,_files} from rust_test.crate (#1443)
1cd0788d2a Apply get_lib_name correctly to the C++ runtime libraries (#1508)
90808f0dc4 Minor cleanup to documentation (#1505)
735640f2df Enable rust-analyzer tests on windows. (#1506)
0f34573166 Updated rules_rust to version 0.9.0 (#1503)
9b61b49934 Promoted crate_universe to non-experimental (#1504)
76360dd354 Implement rules archive release artifact in github action. (#1501)
4e5fac5980 Do not pass `--Clink-arg=-l` for libstd and libtest (#1500)
6c38934636 pipelining: add the ability to disable pipelining for a single rule. (#1499)
867fc37c17 rules_rust: enable pipelined compilation. (#1275)
c97f255dfe Delete deprecated targets (#1496)
43b42884a7 Updated examples to use crate_universe (#1494)
0ffde973e8 Updated `//util/import` to use crate_universe (#1492)
83a03ab03e Updated proto rules to fetch dependencies using crate_universe (#1491)
67e204ff22 fix: `rust_doc_test` failure to find params file (#1418)
0fc834bdfa Updated all toolchain_type definitions to be named `toolchain_type`. (#1479)
3be056a7a3 toolchain files: ensure test depends on std (#1486)
228ebfa6eb Updated rules_rust to version `0.8.1`. (#1484)
685dfda497 Fixed use of rust-analyzer with rust_static_library and rust_shared_library (#1482)
2d7f94543f Fix rust-analyzer being unable to find rust sysroot sources. (#1483)
81a77acde2 Updated rules_rust to version `0.8.0`. (#1472)
caad908848 Give useful error on missing workspace manifest (#1475)
0e86b9dd30 Added `rust_analyzer_toolchain` and repository rules for creating one (#1455)
838e4ea828 Update docs on lockfiles (#1477)
fce1222628 Fix typo in function name (#1478)
1929bfbc3e Added Rust version 1.62.1 (#1476)
9a8e878d3d Fix `rust_binary_without_process_wrapper` build with `--stamp` (#1473)
25baabc205 Updated bindgen version (#1470)
8c9d5c7051 Updated rust-analyzer generator to use clap (#1471)
6d8009dbc8 Update `//bindgen` to use `crate_universe` (#1440)
67c97d44ff Updated `tools/rust_analyzer` to use `crate_universe`. (#1448)
6c285eb28e Updated `wasm_bindgen` rules dependencies. (#1464)
82a437cc17 Fixed crate_universe lockfile checks for crates_repository rule (#1468)
e83d5f3c77 Limit coverage to requested files (#1467)
daff327ea7 Stamp only binaries by default (#1452)
adc80a301d Cleanup crate_universe dependency macros (#1460)
824b121acc Updated header of crate_universe generated files to include a regen command (#1461)
d44e1d8363 feat: add `rustc_flags` attr to `rust_doc` rule (#1458)
6b54feb0ff add a way to distinguish proc-macro deps (#1420)
6070b3c9f4 Fixed missing items in distro artifact (#1450)
1e83e74159 do not add proc-macro deps to transitive_link_search_paths (#1424)
ced94dec1b Fix @libgit2 (#1457)
03d1d5e4ac Add extra_rustc_flag and extra_exec_rustc_flag (#1413)
711adc7985 crate_universe: shorten `crate_universe_crate_index` to `cui` (#1454)
8cb9247f18 Replaced small genrules with uses of bazel_skylib (#1451)
38e841aece Upgrade stardoc (#1444)
674762f66a Updated toolchain repository rules to represent one toolchain per repo (#1442)
b22a6e1416 Re-enable disabled windows jobs in CI (#1353)
2fb94c48fd docs: Update homepage to use latest version (#1441)
389c58fcb1 Updated rules_rust to version `0.7.0`. (#1436)
60f26d49d8 exclude `BUILD` and `WORKSPACE` files from generated crate_universe targets (#1437)
26344d4cd7 Have rust_test put its compilation outputs in a subdirectory (#1434)
8b0f644122 Updated crate_universe version to `0.4.0`. (#1435)
adf92b1534 update crate_universe `--repin` args to not require values. (#1433)
da75146d0a Do not attempt to instrument rust code for coverage if rust_toolchain.llvm-cov is None (#1432)
bde2c36821 Added Rust 1.62.0 (#1427)
7056f22bd0 Fixed crate_universe not finding target library names for "rlib"s (#1429)
3d65214d23 crate_universe support for individually updating packages. (#1426)
5a9d999db9 Updated `attr.label` attribute defaults to use `Label` constructor (#1422)
52fc70145a Added `TemplateVariableInfo` to `rust_toolchain`. (#1416)
7465c1aa29 Add test coverage support (#1324)
c5c3603da6 Bump the min supported bazel version (#1414)
937bdc9d05 Add a `cargo_dep_env` rule for setting build.rs environment variables (#1415)
91466fc0d1 Updated `rules_rust` version to `0.6.0`. (#1397)
97264b79d5 Update wasm_bindgen to use crate universe. (#1398)
d3197a65c5 Updated crate_universe version (to `0.3.0`) and dependencies (#1409)
a15e67d666 Deleted "extra workspace member" functionality from crate_universe (#1406)
5910a7585a Use a vec, not set for rustc_flags for crate_universe annotations (#1404)
3aa88ab067 Deleted deprecated `rust_analyzer` rule. (#1405)
7adf7210d0 cargo: Fix handling of relative sysroots (#1371)
57607239ec Enable rustfmt CI for Windows. (#1403)
30e68b49be Added more "ignore" tags to rustfmt and clippy rules. (#1400)
53ad14eead Added support for vendoring external manifests (#1381)
ff243c6ef0 Reorganized rustfmt source tree (#1399)
94e0044afe Refactored the Rustfmt tool to work cross-platform (#1375)
8fca438124 Ran clang-format on all C++ sources (#1396)
e765719e29 Added TemplateVariableInfo to rust_toolchain (#1377)
81590f4b6a Fixed Clippy bug with `--@rules_rust//:clippy_flags`. (#1392)
d77b9f7c6a Use `target_compatible_with` to make `macos` with `Xcode` happy (#1391)
ec27dbe310 Added comments to internal function (#1378)
a9dd2f9200 Removed deprecated file (#1380)
16175c881c Renamed toolchain files targets (#1376)
c7cb5bd7a8 Support crates that have mix of generated and nongenerated inputs (#1340)
521e649ff4 Avoid using common substrings as encodings. (#1370)
28ac6b133d Use a more compact encoding in the `import` macro. (#1365)
3a099bfa97 Fix incorrect assertion in test_env_launcher test (#1368)
4661093fb1 Use target instead of rule in rust_register_toolchains edition docs (#1366)
652f2802e3 Add `env` attribute to `rust_toolchain`. (#1363)
9520f37f1e Update rules_perl in examples (#1364)
1b451c301e Add armv7-linux-androideabi tier 2 triple (#1362)
0265c293f1 Ensure crate_features to be included correctly in rust_project.json (#1356)
121d65fe6a Updated `rules_rust` version to `0.5.0` (#1360)
aca4ec1a0f crate_universe: fix typo (#1314)
69ca2611c5 Don't leak native dependencies of proc_macro (#1359)
4c7f08b8b9 Fixed missing docs (#1358)
e48bec94de feat: build script toolchains annotations (#1344)
ffb946f4b7 Ensure memchr is linked after libobject (#1349)
edca1d8042 Add developing notes for crate_universe (#1351)
120f911d2f Updated rust_bindgen dependencies API (#1354)
42c4528a5f Added Rust 1.61.0 (#1347)
c05e0c6ab1 Fixed fetch_shas script to correctly include .gz and .xz extensions (#1355)
9770b0dd75 Update apple_support (#1346)
87eb807e67 Added support for Rust 1.61.0 to crate_universe (#1348)
84c1d42128 Temporarily disable windows job in CI. (#1352)
421d30e4ff Remove unnecessary `crate_name` usage in `rust_test_suite`. (#1345)
10185339dd Build `rust_test` targets with `crate` using the same crate name as the underlying library target. (#1332)
0049ce3884 Add support for riscv32 targets (#1323)
3aa6de3c63 remove experimental_use_whole_archive_for_native_deps (#1338)
a066bfed46 Replace slashes with underscores in default crate names. (#1336)
1b91e592d5 Revert "Replace slashes with underscores in default crate names. (#1334)" (#1335)
51f8e302e9 "sandwich" rlibs between native deps in linker order (#1333)
df354904a1 Replace slashes with underscores in default crate names. (#1334)
21eed19188 Bump version to 0.4.0 (#1329)
d3d9abac4d Support . workspace member (#1326)
fccaae3055 Error calling `all_crate_deps` without `Cargo.toml` (#1327)
d7c532cb78 Updated wasm_bindgen dependencies API (#1313)
fb4d5546ea Updated wasm_bindgen rules to only require core `rules_nodejs` rules (#1311)
1590670ae1 Prevents running of clippy on bindgen targets (#1320)
73d0164a34 Add support for aarch64-apple-ios-sim (#1304)
61eee54c73 Add bazel-* directories in cargo_manifest_dir/external_crate to gitignore (#1279)
42f78f25e1 crate_universe: Improved documentation (#1305)
bddc4bd94a Silence warnings for example/test dependencies (#1312)
b04fe3b21f Use tinyjson from crates.io instead of github.com. (#1309)
1cab691d14 Remove doc about STATIC_RUST_URL env var. (#1306)
d86e06a884 Don't propagate non-shared transitive linker inputs from `rust_static|shared_library` (#1299)
5abeb93333 Don't emit `CrateInfo` from `rust_static_library` and `rust_shared_library` (#1298)
0175a1b7aa fix for using a nightly channel after https://github.com/bazelbuild/rules_rust/commit/841fc6fb82141576d91aecb1d3f2656d58b0ab71 (#1302)
e07881fa22 Updated crate_universe docs (#1301)
c63ad973f1 rustc: fix a conditional (#1300)
a6f332fcbe Use __EXEC_ROOT__ paths for genfiles in rust_analyzer aspect (#1293)
97de47df51 Remove 'triple' field from triple struct in favor of 'str' (#1297)
58627f59eb Make get_host_triple public to get a triple from Bazel's repository_ctx (#1289)
612f4362bc Updated `rules_rust` version to `0.3.1` (#1296)
26fa5a15de Fixed build issues in release artifact (#1295)
48bb32f817 crate_universe: Added tests for serialized config files. (#1291)
841fc6fb82 Enable xz archives (#1284)
f7cb22efa6 feat(#514): pass extra flags to clippy (#1264)
e9f8b48711 Updated `rules_rust` version to `0.3.0` (#1288)
c900e1c66c Revert "Add workaround for arm vs armv7 android issue (#1259)" (#1290)
01ebef2fb9 Remove DEFAULT_RUST_EDITION (#1256)
03a70796ab Outside of examples, fill in all `edition` attrs (#1257)
207ee4fbcf feat: support extra deps in rust_doc_test (#1286)
4e7d114a8e Fix typo in render config doc (#1281)
db17f291d3 Fix crate annotation anchor (#1282)
fdb6851a92 Fix target name in `rust_test` example. (#1280)
4fb6e40147 Don't leak additive build file content across crates (#1278)
965044ae2b Remove `rust_test` example which doesn't build. (#1267)
f6e7e0a93f add a stopgap experimental_use_whole_archive_for_native_deps attribute (#1269)
34fd46756a process_wrapper: add support for terminating rustc after it emits rmeta. (#1207)
b778fca0ac crate_universe: propagate build_script_tools (#1266)
f6f27a8734 Add workaround for arm vs armv7 android issue (#1259)
c3f56c2d50 Add the BUILD.bazel file to the wasm_bindgen distro filegroup target (#1246) (#1261)
1f2e6231de Set edition for process_wrapper and cargo_build_script_runner (#1254)
55790492ac Updated Rust to 1.60.0 (#1247)
b594f90f17 Workaround for issue in linux Cargo binaries (#1252)
8f0dd9042e rust_test_suite: ensure crate names are valid (#1248)
4144ddeb9c Updating `rules_rust` version to `0.2.1` (#1243)
65cad76a52 Fixed proto package in release artifact (#1241)
4d8de6e361 Updated repository pin in the docs (#1240)
e5a3970754 Updating `rules_rust` version to `0.2.0` (#1235)
d061bf640e Updated `crate_universe` version to `0.2.0` (#1239)
c0505da0d2 Replace `rust_repositories` with `rust_register_toolchains` in docs (#1237)
145ad7609f Fixed `crates_repository` deleting `.cargo/config.toml` files. (#1227)
20066b05e2 fix: distribute `//tools/rust_analyzer` (#1234)
b58ce89603 Enabled `rust_doc_test` for `crate_universe` (#1232)
d2e2470cbf Fix some unit tests to run from another workspace (#1226)
b03aee039a Fixed `crate_universe` clippy defects (#1228)
41b39f0c99 add bots using lld (and examples with clang as a drive by) (#1221)
84e98e4d2f don't emit --codegen={metadata,extra-filename} for rust_static_library and rust_shared_library (#1222)
e48c834a40 Renamed `crate_index` repository to `crate_universe_crate_index` (#1217)
99b4b2574f fix use of stamping in rust_binary_without_process_wrapper (#1218)
8df4517d37 Add NUM_JOBS env var to cargo build scripts (#1216)
628e85e70f Restrucutred `crate_universe` dependency macros (#1208)
e3d67a0a10 Updated docs to guide users to using releases (#1213)
fd912e644c Updated crate_universe docs. (#1212)
cde4c0826c Delete deprecated `rules` targets (#1210)
26e01c8386 cache the release archive in release actions (#1201)
3205c9d846 Updated crate_universe setup guide (#1202)
c078494678 Don't leak deps from rust_proc_macro (#1206)
7c865ffeb1 Build `_import_macro_dep` in `exec` mode (#1203)
635da93206 Updating `rules_rust` version to `0.1.0`. (#1198)
6c797c9070 disable advanced features of C++ rules (#1200)
86d47a1bba Tweak import macro bootstrap to trick rust analyzer aspect (#1179)
80d197205a Added release workflow (#1195)
cd44b3670a Added support for producing distribution archives (#1194)
a665447989 Traverse custom alias-like rules in the rust analyzer aspect (#1190)
4504983fa9 Add a test showing that rust_analayzer aspect traverses aliases (#1188)
297dd18215 Updated `crate_universe` to version `0.1.0`. (#1197)
0d9c7d5e1b Specify root target for rust_analyzer test (#1189)
4a0352fecd Updated `crate_universe` dependencies (#1196)
5126479254 Fixed crate_universe release tools (#1193)
e840400eb6 Remove last remains of use_process_wrapper flag. (#1192)
eb7db68d96 Fix iOS linker arguments (#1186)
de726a10c9 Create internal rust_binary rule instead of using transitions (#1187)
5e6ad9f638 Regenerated `cargo-raze` outputs with v0.15.0 (#1184)
980b662843 add static_library to get_preferred_artifact with pic (#1183)
97fd329540 Populate CFLAGS and CXXFLAGS when invoking build script. (#1081)
git-subtree-dir: third_party/rules_rust
git-subtree-split: 078c6908fc32c168b58e72cc3884dd8e30419e3a
Signed-off-by: Brian Silverman <bsilver16384@gmail.com>
Change-Id: Ifc218edaa852263bd76835ee7de44de07c08aec2
diff --git a/crate_universe/src/cli.rs b/crate_universe/src/cli.rs
index 2ed27ac..fdff844 100644
--- a/crate_universe/src/cli.rs
+++ b/crate_universe/src/cli.rs
@@ -19,7 +19,11 @@
pub use vendor::vendor;
#[derive(Parser, Debug)]
-#[clap(name = "cargo-bazel", about, version)]
+#[clap(
+ name = "cargo-bazel",
+ about = "`crate_universe` is a collection of tools which use Cargo to generate build targets for Bazel.",
+ version
+)]
pub enum Options {
/// Generate Bazel Build files from a Cargo manifest.
Generate(GenerateOptions),
diff --git a/crate_universe/src/cli/generate.rs b/crate_universe/src/cli/generate.rs
index 67ae868..3fdc97e 100644
--- a/crate_universe/src/cli/generate.rs
+++ b/crate_universe/src/cli/generate.rs
@@ -1,13 +1,14 @@
//! The cli entrypoint for the `generate` subcommand
+use std::fs;
use std::path::PathBuf;
-use anyhow::{bail, Result};
+use anyhow::{bail, Context as AnyhowContext, Result};
use clap::Parser;
use crate::config::Config;
use crate::context::Context;
-use crate::lockfile::{is_cargo_lockfile, lock_context, write_lockfile, LockfileKind};
+use crate::lockfile::{lock_context, write_lockfile};
use crate::metadata::load_metadata;
use crate::metadata::Annotations;
use crate::rendering::{write_outputs, Renderer};
@@ -15,7 +16,7 @@
/// Command line options for the `generate` subcommand
#[derive(Parser, Debug)]
-#[clap(about, version)]
+#[clap(about = "Command line options for the `generate` subcommand", version)]
pub struct GenerateOptions {
/// The path to a Cargo binary to use for gathering metadata
#[clap(long, env = "CARGO")]
@@ -35,11 +36,11 @@
/// The path to either a Cargo or Bazel lockfile
#[clap(long)]
- pub lockfile: PathBuf,
+ pub lockfile: Option<PathBuf>,
- /// The type of lockfile
+ /// The path to a [Cargo.lock](https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html) file.
#[clap(long)]
- pub lockfile_kind: LockfileKind,
+ pub cargo_lockfile: PathBuf,
/// The directory of the current repository rule
#[clap(long)]
@@ -54,7 +55,7 @@
#[clap(long)]
pub repin: bool,
- /// The path to a Cargo metadata `json` file.
+ /// The path to a Cargo metadata `json` file. This file must be next to a `Cargo.toml` and `Cargo.lock` file.
#[clap(long)]
pub metadata: Option<PathBuf>,
@@ -67,25 +68,19 @@
// Load the config
let config = Config::try_from_path(&opt.config)?;
- // Determine if the dependencies need to be repinned.
- let mut should_repin = opt.repin;
-
- // Cargo lockfiles must always be repinned.
- if is_cargo_lockfile(&opt.lockfile, &opt.lockfile_kind) {
- should_repin = true;
- }
-
// Go straight to rendering if there is no need to repin
- if !should_repin {
- let context = Context::try_from_path(opt.lockfile)?;
+ if !opt.repin {
+ if let Some(lockfile) = &opt.lockfile {
+ let context = Context::try_from_path(lockfile)?;
- // Render build files
- let outputs = Renderer::new(config.rendering).render(&context)?;
+ // Render build files
+ let outputs = Renderer::new(config.rendering).render(&context)?;
- // Write the outputs to disk
- write_outputs(outputs, &opt.repository_dir, opt.dry_run)?;
+ // Write the outputs to disk
+ write_outputs(outputs, &opt.repository_dir, opt.dry_run)?;
- return Ok(());
+ return Ok(());
+ }
}
// Ensure Cargo and Rustc are available for use during generation.
@@ -105,20 +100,13 @@
};
// Load Metadata and Lockfile
- let (cargo_metadata, cargo_lockfile) = load_metadata(
- metadata_path,
- if is_cargo_lockfile(&opt.lockfile, &opt.lockfile_kind) {
- Some(&opt.lockfile)
- } else {
- None
- },
- )?;
+ let (cargo_metadata, cargo_lockfile) = load_metadata(metadata_path)?;
// Copy the rendering config for later use
let render_config = config.rendering.clone();
// Annotate metadata
- let annotations = Annotations::new(cargo_metadata, cargo_lockfile, config.clone())?;
+ let annotations = Annotations::new(cargo_metadata, cargo_lockfile.clone(), config.clone())?;
// Generate renderable contexts for earch package
let context = Context::new(annotations)?;
@@ -130,13 +118,18 @@
write_outputs(outputs, &opt.repository_dir, opt.dry_run)?;
// Ensure Bazel lockfiles are written to disk so future generations can be short-circuted.
- if matches!(opt.lockfile_kind, LockfileKind::Bazel) {
+ if let Some(lockfile) = opt.lockfile {
let splicing_manifest = SplicingManifest::try_from_path(&opt.splicing_manifest)?;
- let lockfile = lock_context(context, &config, &splicing_manifest, cargo_bin, rustc_bin)?;
+ let lock_content =
+ lock_context(context, &config, &splicing_manifest, cargo_bin, rustc_bin)?;
- write_lockfile(lockfile, &opt.lockfile, opt.dry_run)?;
+ write_lockfile(lock_content, &lockfile, opt.dry_run)?;
}
+ // Write the updated Cargo.lock file
+ fs::write(&opt.cargo_lockfile, cargo_lockfile.to_string())
+ .context("Failed to write Cargo.lock file back to the workspace.")?;
+
Ok(())
}
diff --git a/crate_universe/src/cli/query.rs b/crate_universe/src/cli/query.rs
index 668f64f..19087ab 100644
--- a/crate_universe/src/cli/query.rs
+++ b/crate_universe/src/cli/query.rs
@@ -13,7 +13,7 @@
/// Command line options for the `query` subcommand
#[derive(Parser, Debug)]
-#[clap(about, version)]
+#[clap(about = "Command line options for the `query` subcommand", version)]
pub struct QueryOptions {
/// The lockfile path for reproducible Cargo->Bazel renderings
#[clap(long)]
diff --git a/crate_universe/src/cli/splice.rs b/crate_universe/src/cli/splice.rs
index cb8ba20..213ee34 100644
--- a/crate_universe/src/cli/splice.rs
+++ b/crate_universe/src/cli/splice.rs
@@ -2,34 +2,37 @@
use std::path::PathBuf;
+use anyhow::Context;
use clap::Parser;
use crate::cli::Result;
-use crate::metadata::{write_metadata, Generator, MetadataGenerator};
-use crate::splicing::{
- generate_lockfile, ExtraManifestsManifest, Splicer, SplicingManifest, WorkspaceMetadata,
-};
+use crate::metadata::{write_metadata, CargoUpdateRequest, Generator, MetadataGenerator};
+use crate::splicing::{generate_lockfile, Splicer, SplicingManifest, WorkspaceMetadata};
/// Command line options for the `splice` subcommand
#[derive(Parser, Debug)]
-#[clap(about, version)]
+#[clap(about = "Command line options for the `splice` subcommand", version)]
pub struct SpliceOptions {
/// A generated manifest of splicing inputs
#[clap(long)]
pub splicing_manifest: PathBuf,
- /// A generated manifest of "extra workspace members"
- #[clap(long)]
- pub extra_manifests_manifest: PathBuf,
-
- /// A Cargo lockfile (Cargo.lock).
+ /// The path to a [Cargo.lock](https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html) file.
#[clap(long)]
pub cargo_lockfile: Option<PathBuf>,
- /// The directory in which to build the workspace. A `Cargo.toml` file
- /// should always be produced within this directory.
+ /// The desired update/repin behavior
+ #[clap(long, env = "CARGO_BAZEL_REPIN", default_missing_value = "true")]
+ pub repin: Option<CargoUpdateRequest>,
+
+ /// The directory in which to build the workspace. If this argument is not
+ /// passed, a temporary directory will be generated.
#[clap(long)]
- pub workspace_dir: PathBuf,
+ pub workspace_dir: Option<PathBuf>,
+
+ /// The location where the results of splicing are written.
+ #[clap(long)]
+ pub output_dir: PathBuf,
/// If true, outputs will be printed instead of written to disk.
#[clap(long)]
@@ -52,39 +55,62 @@
pub fn splice(opt: SpliceOptions) -> Result<()> {
// Load the all config files required for splicing a workspace
let splicing_manifest = SplicingManifest::try_from_path(&opt.splicing_manifest)?;
- let extra_manifests_manifest =
- ExtraManifestsManifest::try_from_path(opt.extra_manifests_manifest)?;
+
+ // Determine the splicing workspace
+ let temp_dir;
+ let splicing_dir = match &opt.workspace_dir {
+ Some(dir) => dir.clone(),
+ None => {
+ temp_dir = tempfile::tempdir().context("Failed to generate temporary directory")?;
+ temp_dir.as_ref().to_path_buf()
+ }
+ };
// Generate a splicer for creating a Cargo workspace manifest
- let splicer = Splicer::new(
- opt.workspace_dir,
- splicing_manifest,
- extra_manifests_manifest,
- )?;
+ let splicer = Splicer::new(splicing_dir, splicing_manifest)?;
// Splice together the manifest
- let manifest_path = splicer.splice_workspace()?;
+ let manifest_path = splicer.splice_workspace(&opt.cargo)?;
// Generate a lockfile
- let cargo_lockfile =
- generate_lockfile(&manifest_path, &opt.cargo_lockfile, &opt.cargo, &opt.rustc)?;
+ let cargo_lockfile = generate_lockfile(
+ &manifest_path,
+ &opt.cargo_lockfile,
+ &opt.cargo,
+ &opt.rustc,
+ &opt.repin,
+ )?;
// Write the registry url info to the manifest now that a lockfile has been generated
WorkspaceMetadata::write_registry_urls(&cargo_lockfile, &manifest_path)?;
+ let output_dir = opt.output_dir.clone();
+
// Write metadata to the workspace for future reuse
let (cargo_metadata, _) = Generator::new()
.with_cargo(opt.cargo)
.with_rustc(opt.rustc)
.generate(&manifest_path.as_path_buf())?;
- // Write metadata next to the manifest
- let metadata_path = manifest_path
+ let cargo_lockfile_path = manifest_path
.as_path_buf()
.parent()
- .expect("Newly spliced cargo manifest has no parent directory")
- .join("cargo-bazel-spliced-metadata.json");
- write_metadata(&metadata_path, &cargo_metadata)?;
+ .with_context(|| {
+ format!(
+ "The path {} is expected to have a parent directory",
+ manifest_path.as_path_buf().display()
+ )
+ })?
+ .join("Cargo.lock");
+
+ // Generate the consumable outputs of the splicing process
+ std::fs::create_dir_all(&output_dir)
+ .with_context(|| format!("Failed to create directories for {}", &output_dir.display()))?;
+
+ write_metadata(&opt.output_dir.join("metadata.json"), &cargo_metadata)?;
+
+ std::fs::copy(cargo_lockfile_path, output_dir.join("Cargo.lock"))
+ .context("Failed to copy lockfile")?;
Ok(())
}
diff --git a/crate_universe/src/cli/vendor.rs b/crate_universe/src/cli/vendor.rs
index 68e107f..0b90541 100644
--- a/crate_universe/src/cli/vendor.rs
+++ b/crate_universe/src/cli/vendor.rs
@@ -1,6 +1,7 @@
//! The cli entrypoint for the `vendor` subcommand
use std::collections::BTreeSet;
+use std::env;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::{self, ExitStatus};
@@ -10,16 +11,15 @@
use crate::config::{Config, VendorMode};
use crate::context::Context;
+use crate::metadata::CargoUpdateRequest;
use crate::metadata::{Annotations, VendorGenerator};
use crate::metadata::{Generator, MetadataGenerator};
use crate::rendering::{render_module_label, write_outputs, Renderer};
-use crate::splicing::{
- generate_lockfile, ExtraManifestsManifest, Splicer, SplicingManifest, WorkspaceMetadata,
-};
+use crate::splicing::{generate_lockfile, Splicer, SplicingManifest, WorkspaceMetadata};
/// Command line options for the `vendor` subcommand
#[derive(Parser, Debug)]
-#[clap(about, version)]
+#[clap(about = "Command line options for the `vendor` subcommand", version)]
pub struct VendorOptions {
/// The path to a Cargo binary to use for gathering metadata
#[clap(long, env = "CARGO")]
@@ -41,7 +41,7 @@
#[clap(long)]
pub splicing_manifest: PathBuf,
- /// The path to a Cargo lockfile
+ /// The path to a [Cargo.lock](https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html) file.
#[clap(long)]
pub cargo_lockfile: Option<PathBuf>,
@@ -50,13 +50,19 @@
#[clap(long)]
pub cargo_config: Option<PathBuf>,
+ /// The desired update/repin behavior. The arguments passed here are forwarded to
+ /// [cargo update](https://doc.rust-lang.org/cargo/commands/cargo-update.html). See
+ /// [metadata::CargoUpdateRequest] for details on the values to pass here.
+ #[clap(long, env = "CARGO_BAZEL_REPIN", default_missing_value = "true")]
+ pub repin: Option<CargoUpdateRequest>,
+
/// The path to a Cargo metadata `json` file.
#[clap(long)]
pub metadata: Option<PathBuf>,
- /// A generated manifest of "extra workspace members"
- #[clap(long)]
- pub extra_manifests_manifest: PathBuf,
+ /// The path to a bazel binary
+ #[clap(long, env = "BAZEL_REAL", default_value = "bazel")]
+ pub bazel: PathBuf,
/// The directory in which to build the workspace. A `Cargo.toml` file
/// should always be produced within this directory.
@@ -83,31 +89,55 @@
Ok(status)
}
+/// Query the Bazel output_base to determine the location of external repositories.
+fn locate_bazel_output_base(bazel: &Path, workspace_dir: &Path) -> Result<PathBuf> {
+ // Allow a predefined environment variable to take precedent. This
+ // solves for the specific needs of Bazel CI on Github.
+ if let Ok(output_base) = env::var("OUTPUT_BASE") {
+ return Ok(PathBuf::from(output_base));
+ }
+
+ let output = process::Command::new(bazel)
+ .current_dir(workspace_dir)
+ .args(["info", "output_base"])
+ .output()
+ .context("Failed to query the Bazel workspace's `output_base`")?;
+
+ if !output.status.success() {
+ bail!(output.status)
+ }
+
+ Ok(PathBuf::from(
+ String::from_utf8_lossy(&output.stdout).trim(),
+ ))
+}
+
pub fn vendor(opt: VendorOptions) -> Result<()> {
+ let output_base = locate_bazel_output_base(&opt.bazel, &opt.workspace_dir)?;
+
// Load the all config files required for splicing a workspace
- let splicing_manifest =
- SplicingManifest::try_from_path(&opt.splicing_manifest)?.absoulutize(&opt.workspace_dir);
- let extra_manifests_manifest =
- ExtraManifestsManifest::try_from_path(opt.extra_manifests_manifest)?.absoulutize();
+ let splicing_manifest = SplicingManifest::try_from_path(&opt.splicing_manifest)?
+ .resolve(&opt.workspace_dir, &output_base);
let temp_dir = tempfile::tempdir().context("Failed to create temporary directory")?;
// Generate a splicer for creating a Cargo workspace manifest
- let splicer = Splicer::new(
- PathBuf::from(temp_dir.as_ref()),
- splicing_manifest,
- extra_manifests_manifest,
- )
- .context("Failed to crate splicer")?;
+ let splicer = Splicer::new(PathBuf::from(temp_dir.as_ref()), splicing_manifest)
+ .context("Failed to create splicer")?;
// Splice together the manifest
let manifest_path = splicer
- .splice_workspace()
+ .splice_workspace(&opt.cargo)
.context("Failed to splice workspace")?;
- // Generate a lockfile
- let cargo_lockfile =
- generate_lockfile(&manifest_path, &opt.cargo_lockfile, &opt.cargo, &opt.rustc)?;
+ // Gather a cargo lockfile
+ let cargo_lockfile = generate_lockfile(
+ &manifest_path,
+ &opt.cargo_lockfile,
+ &opt.cargo,
+ &opt.rustc,
+ &opt.repin,
+ )?;
// Write the registry url info to the manifest now that a lockfile has been generated
WorkspaceMetadata::write_registry_urls(&cargo_lockfile, &manifest_path)?;
@@ -122,7 +152,7 @@
let config = Config::try_from_path(&opt.config)?;
// Annotate metadata
- let annotations = Annotations::new(cargo_metadata, cargo_lockfile, config.clone())?;
+ let annotations = Annotations::new(cargo_metadata, cargo_lockfile.clone(), config.clone())?;
// Generate renderable contexts for earch package
let context = Context::new(annotations)?;
@@ -143,6 +173,12 @@
.with_context(|| format!("Failed to delete {}", vendor_dir.display()))?;
}
+ // Store the updated Cargo.lock
+ if let Some(path) = &opt.cargo_lockfile {
+ fs::write(path, cargo_lockfile.to_string())
+ .context("Failed to write Cargo.lock file back to the workspace.")?;
+ }
+
// Vendor the crates from the spliced workspace
if matches!(config.rendering.vendor_mode, Some(VendorMode::Local)) {
VendorGenerator::new(opt.cargo.clone(), opt.rustc.clone())
diff --git a/crate_universe/src/config.rs b/crate_universe/src/config.rs
index 66e3a7e..0560829 100644
--- a/crate_universe/src/config.rs
+++ b/crate_universe/src/config.rs
@@ -8,7 +8,7 @@
use std::{fmt, fs};
use anyhow::Result;
-use cargo_lock::package::source::GitReference;
+use cargo_lock::package::GitReference;
use cargo_metadata::Package;
use semver::VersionReq;
use serde::de::Visitor;
@@ -73,6 +73,9 @@
#[serde(default = "default_platforms_template")]
pub platforms_template: String,
+ /// The command to use for regenerating generated files.
+ pub regen_command: String,
+
/// An optional configuration for rendirng content to be rendered into repositories.
pub vendor_mode: Option<VendorMode>,
}
@@ -211,6 +214,10 @@
/// [rustc_env](https://bazelbuild.github.io/rules_rust/cargo.html#cargo_build_script-rustc_env) attribute.
pub build_script_rustc_env: Option<BTreeMap<String, String>>,
+ /// Additional labels to pass to a build script's
+ /// [toolchains](https://bazel.build/reference/be/common-definitions#common-attributes) attribute.
+ pub build_script_toolchains: Option<BTreeSet<String>>,
+
/// A scratch pad used to write arbitrary text to target BUILD files.
pub additive_build_file_content: Option<String>,
@@ -302,6 +309,7 @@
build_script_data_glob: joined_extra_member!(self.build_script_data_glob, rhs.build_script_data_glob, BTreeSet::new, BTreeSet::extend),
build_script_env: joined_extra_member!(self.build_script_env, rhs.build_script_env, BTreeMap::new, BTreeMap::extend),
build_script_rustc_env: joined_extra_member!(self.build_script_rustc_env, rhs.build_script_rustc_env, BTreeMap::new, BTreeMap::extend),
+ build_script_toolchains: joined_extra_member!(self.build_script_toolchains, rhs.build_script_toolchains, BTreeSet::new, BTreeSet::extend),
additive_build_file_content: joined_extra_member!(self.additive_build_file_content, rhs.additive_build_file_content, String::new, concat_string),
shallow_since,
patch_args: joined_extra_member!(self.patch_args, rhs.patch_args, Vec::new, Vec::extend),
@@ -492,4 +500,35 @@
id.version = "<1".to_owned();
assert!(!id.matches(&package));
}
+
+ #[test]
+ fn deserialize_config() {
+ let runfiles = runfiles::Runfiles::create().unwrap();
+ let path = runfiles
+ .rlocation("rules_rust/crate_universe/test_data/serialized_configs/config.json");
+
+ let content = std::fs::read_to_string(path).unwrap();
+
+ let config: Config = serde_json::from_str(&content).unwrap();
+
+ // Annotations
+ let annotation = config
+ .annotations
+ .get(&CrateId::new("rand".to_owned(), "0.8.5".to_owned()))
+ .unwrap();
+ assert_eq!(
+ annotation.crate_features,
+ Some(BTreeSet::from(["small_rng".to_owned()]))
+ );
+
+ // Global settings
+ assert!(config.cargo_config.is_none());
+ assert!(!config.generate_build_scripts);
+
+ // Render Config
+ assert_eq!(
+ config.rendering.platforms_template,
+ "//custom/platform:{triple}"
+ );
+ }
}
diff --git a/crate_universe/src/context/crate_context.rs b/crate_universe/src/context/crate_context.rs
index 0278ebe..923fa1e 100644
--- a/crate_universe/src/context/crate_context.rs
+++ b/crate_universe/src/context/crate_context.rs
@@ -101,8 +101,8 @@
#[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
pub rustc_env_files: SelectStringList,
- #[serde(skip_serializing_if = "SelectStringList::should_skip_serializing")]
- pub rustc_flags: SelectStringList,
+ #[serde(skip_serializing_if = "Vec::is_empty")]
+ pub rustc_flags: Vec<String>,
pub version: String,
@@ -179,6 +179,9 @@
#[serde(skip_serializing_if = "Option::is_none")]
pub links: Option<String>,
+
+ #[serde(skip_serializing_if = "BTreeSet::is_empty")]
+ pub toolchains: BTreeSet<String>,
}
impl Default for BuildScriptAttributes {
@@ -198,6 +201,7 @@
rustc_env_files: Default::default(),
tools: Default::default(),
links: Default::default(),
+ toolchains: Default::default(),
}
}
}
@@ -417,12 +421,8 @@
}
// Rustc flags
- // TODO: SelectList is currently backed by `BTreeSet` which is generally incorrect
- // for rustc flags. Should SelectList be refactored?
if let Some(extra) = &crate_extra.rustc_flags {
- for data in extra.iter() {
- self.common_attrs.rustc_flags.insert(data.clone(), None);
- }
+ self.common_attrs.rustc_flags.append(&mut extra.clone());
}
// Rustc env
@@ -456,6 +456,20 @@
}
}
+ // Tools
+ if let Some(extra) = &crate_extra.build_script_tools {
+ for data in extra {
+ attrs.tools.insert(data.clone(), None);
+ }
+ }
+
+ // Toolchains
+ if let Some(extra) = &crate_extra.build_script_toolchains {
+ for data in extra {
+ attrs.toolchains.insert(data.clone());
+ }
+ }
+
// Data glob
if let Some(extra) = &crate_extra.build_script_data_glob {
attrs.data_glob.extend(extra.clone());
@@ -563,11 +577,10 @@
let crate_name = sanitize_module_name(&target.name);
// Locate the crate's root source file relative to the package root normalized for unix
- let crate_root =
- pathdiff::diff_paths(target.src_path.to_string(), package_root).map(
- // Normalize the path so that it always renders the same regardless of platform
- |root| root.to_string_lossy().replace("\\", "/"),
- );
+ let crate_root = pathdiff::diff_paths(&target.src_path, package_root).map(
+ // Normalize the path so that it always renders the same regardless of platform
+ |root| root.to_string_lossy().replace('\\', "/"),
+ );
// Conditionally check to see if the dependencies is a build-script target
if include_build_scripts && kind == "custom-build" {
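
The hunk above switches the crate context's `rustc_flags` from the `BTreeSet`-backed `SelectStringList` to a plain `Vec<String>`; a set both sorts and deduplicates its entries, which mangles ordered, repeatable flags such as paired `-l` arguments. A minimal standalone sketch of that failure mode, using `std::collections::BTreeSet` directly rather than the crate's real `SelectStringList` type:

    // Why a set is the wrong container for rustc flags: order and duplicates matter.
    use std::collections::BTreeSet;

    fn main() {
        // Flags as they must reach rustc: "-l" repeats and ordering is significant.
        let flags = vec!["-l", "dylib=ssl", "-l", "dylib=crypto"];

        // A BTreeSet drops the repeated "-l" and sorts the rest, producing an
        // argument list rustc cannot use.
        let as_set: Vec<&str> = flags
            .iter()
            .copied()
            .collect::<BTreeSet<_>>()
            .into_iter()
            .collect();
        assert_eq!(as_set, vec!["-l", "dylib=crypto", "dylib=ssl"]);

        // A Vec preserves both order and duplicates, which is what the generated
        // `rustc_flags` attribute needs.
        assert_eq!(flags, vec!["-l", "dylib=ssl", "-l", "dylib=crypto"]);
    }

The `duplicate_rustc_flags` test added to `crate_universe/src/rendering.rs` later in this patch checks the same property against a rendered BUILD file.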
diff --git a/crate_universe/src/lockfile.rs b/crate_universe/src/lockfile.rs
index 91f4832..65738a6 100644
--- a/crate_universe/src/lockfile.rs
+++ b/crate_universe/src/lockfile.rs
@@ -1,11 +1,11 @@
//! Utility module for interracting with different kinds of lock files
+use std::collections::HashMap;
use std::convert::TryFrom;
use std::ffi::OsStr;
use std::fs;
use std::path::Path;
use std::process::Command;
-use std::str::FromStr;
use anyhow::{bail, Context as AnyhowContext, Result};
use hex::ToHex;
@@ -16,61 +16,6 @@
use crate::context::Context;
use crate::splicing::{SplicingManifest, SplicingMetadata};
-#[derive(Debug)]
-pub enum LockfileKind {
- Auto,
- Bazel,
- Cargo,
-}
-
-impl LockfileKind {
- pub fn detect(path: &Path) -> Result<Self> {
- let content = fs::read_to_string(path)?;
-
- if serde_json::from_str::<Context>(&content).is_ok() {
- return Ok(Self::Bazel);
- }
-
- if cargo_lock::Lockfile::from_str(&content).is_ok() {
- return Ok(Self::Cargo);
- }
-
- bail!("Unknown Lockfile kind for {}", path.display())
- }
-}
-
-impl FromStr for LockfileKind {
- type Err = anyhow::Error;
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- let lower = s.to_lowercase();
- if lower == "auto" {
- return Ok(Self::Auto);
- }
-
- if lower == "bazel" {
- return Ok(Self::Bazel);
- }
-
- if lower == "cargo" {
- return Ok(Self::Cargo);
- }
-
- bail!("Unknown LockfileKind: '{}'", s)
- }
-}
-
-pub fn is_cargo_lockfile(path: &Path, kind: &LockfileKind) -> bool {
- match kind {
- LockfileKind::Auto => match LockfileKind::detect(path) {
- Ok(kind) => matches!(kind, LockfileKind::Cargo),
- Err(_) => false,
- },
- LockfileKind::Bazel => false,
- LockfileKind::Cargo => true,
- }
-}
-
pub fn lock_context(
mut context: Context,
config: &Config,
@@ -198,8 +143,31 @@
bail!("Failed to query cargo version")
}
- let version = String::from_utf8(output.stdout)?;
- Ok(version)
+ let version = String::from_utf8(output.stdout)?.trim().to_owned();
+
+ // TODO: There is a bug in the linux binary for Cargo 1.60.0 where
+ // the commit hash reported by the version is shorter than what's
+ // reported on other platforms. This conditional here is a hack to
+ // correct for this difference and ensure lockfile hashes can be
+ // computed consistently. If a new binary is released then this
+ // condition should be removed
+ // https://github.com/rust-lang/cargo/issues/10547
+ let corrections = HashMap::from([
+ (
+ "cargo 1.60.0 (d1fd9fe 2022-03-01)",
+ "cargo 1.60.0 (d1fd9fe2c 2022-03-01)",
+ ),
+ (
+ "cargo 1.61.0 (a028ae4 2022-04-29)",
+ "cargo 1.61.0 (a028ae42f 2022-04-29)",
+ ),
+ ]);
+
+ if corrections.contains_key(version.as_str()) {
+ Ok(corrections[version.as_str()].to_string())
+ } else {
+ Ok(version)
+ }
}
}
@@ -223,7 +191,6 @@
use super::*;
use std::collections::{BTreeMap, BTreeSet};
- use std::fs;
#[test]
fn simple_digest() {
@@ -242,7 +209,7 @@
assert_eq!(
digest,
- Digest("4c8bc5de2d6d7acc7997ae9870e52bc0f0fcbc2b94076e61162078be6a69cc3b".to_owned())
+ Digest("9711073103bd532b7d9c2e32e805280d29fc8591c3e76f9fe489fc372e2866db".to_owned())
);
}
@@ -285,7 +252,7 @@
assert_eq!(
digest,
- Digest("7a0d2f5fce05c4d433826b5c4748bec7b125b79182de598dc700e893e09077e9".to_owned())
+ Digest("756a613410573552bb8a85d6fcafd24a9df3000b8d943bf74c38bda9c306ef0e".to_owned())
);
}
@@ -316,7 +283,7 @@
assert_eq!(
digest,
- Digest("fb5d7854dae366d4a9ff135208c28f08c14c2608dd6c5aa1b35b6e677dd53c06".to_owned())
+ Digest("851b789765d8ee248fd3d55840ffd702ba2f8b0ca6aed2faa45ea63d1b011a99".to_owned())
);
}
@@ -365,59 +332,7 @@
assert_eq!(
digest,
- Digest("2b32833e4265bce03df70dbb9c2b32a78879cc02fbe88a481e3fe4a17812aca9".to_owned())
+ Digest("a9f7ea66f1b04331f8e09c64cd0b972e4c2a136907d7ef90e81ae2654e3c002c".to_owned())
);
}
-
- #[test]
- fn detect_bazel_lockfile() {
- let temp_dir = tempfile::tempdir().unwrap();
- let lockfile = temp_dir.as_ref().join("lockfile");
- fs::write(
- &lockfile,
- serde_json::to_string(&crate::context::Context::default()).unwrap(),
- )
- .unwrap();
-
- let kind = LockfileKind::detect(&lockfile).unwrap();
- assert!(matches!(kind, LockfileKind::Bazel));
- }
-
- #[test]
- fn detect_cargo_lockfile() {
- let temp_dir = tempfile::tempdir().unwrap();
- let lockfile = temp_dir.as_ref().join("lockfile");
- fs::write(
- &lockfile,
- textwrap::dedent(
- r#"
- version = 3
-
- [[package]]
- name = "detect"
- version = "0.1.0"
- "#,
- ),
- )
- .unwrap();
-
- let kind = LockfileKind::detect(&lockfile).unwrap();
- assert!(matches!(kind, LockfileKind::Cargo));
- }
-
- #[test]
- fn detect_invalid_lockfile() {
- let temp_dir = tempfile::tempdir().unwrap();
- let lockfile = temp_dir.as_ref().join("lockfile");
- fs::write(&lockfile, "]} invalid {[").unwrap();
-
- assert!(LockfileKind::detect(&lockfile).is_err());
- }
-
- #[test]
- fn detect_missing_lockfile() {
- let temp_dir = tempfile::tempdir().unwrap();
- let lockfile = temp_dir.as_ref().join("lockfile");
- assert!(LockfileKind::detect(&lockfile).is_err());
- }
}
diff --git a/crate_universe/src/metadata.rs b/crate_universe/src/metadata.rs
index 0e48676..57d90e7 100644
--- a/crate_universe/src/metadata.rs
+++ b/crate_universe/src/metadata.rs
@@ -7,6 +7,7 @@
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;
+use std::str::FromStr;
use anyhow::{bail, Context, Result};
use cargo_lock::Lockfile as CargoLockfile;
@@ -74,6 +75,99 @@
}
}
+/// A configuration describing how to invoke [cargo update](https://doc.rust-lang.org/cargo/commands/cargo-update.html).
+#[derive(Debug, PartialEq)]
+pub enum CargoUpdateRequest {
+ /// Translates to an unrestricted `cargo update` command
+ Eager,
+
+ /// Translates to `cargo update --workspace`
+ Workspace,
+
+ /// Translates to `cargo update --package foo` with an optional `--precise` argument.
+ Package {
+ /// The name of the crate used with `--package`.
+ name: String,
+
+ /// If set, the `--precise` value that pairs with `--package`.
+ version: Option<String>,
+ },
+}
+
+impl FromStr for CargoUpdateRequest {
+ type Err = anyhow::Error;
+
+ fn from_str(s: &str) -> Result<Self, Self::Err> {
+ let lower = s.to_lowercase();
+
+ if ["1", "yes", "true", "on"].contains(&lower.as_str()) {
+ return Ok(Self::Eager);
+ }
+
+ if ["workspace", "minimal"].contains(&lower.as_str()) {
+ return Ok(Self::Workspace);
+ }
+
+ let mut split = s.splitn(2, '@');
+ Ok(Self::Package {
+ name: split.next().map(|s| s.to_owned()).unwrap(),
+ version: split.next().map(|s| s.to_owned()),
+ })
+ }
+}
+
+impl CargoUpdateRequest {
+ /// Determine what arguments to pass to the `cargo update` command.
+ fn get_update_args(&self) -> Vec<String> {
+ match self {
+ CargoUpdateRequest::Eager => Vec::new(),
+ CargoUpdateRequest::Workspace => vec!["--workspace".to_owned()],
+ CargoUpdateRequest::Package { name, version } => {
+ let mut update_args = vec!["--package".to_owned(), name.clone()];
+
+ if let Some(version) = version {
+ update_args.push("--precise".to_owned());
+ update_args.push(version.clone());
+ }
+
+ update_args
+ }
+ }
+ }
+
+ /// Calls `cargo update` with arguments specific to the state of the current variant.
+ pub fn update(&self, manifest: &Path, cargo_bin: &Path, rustc_bin: &Path) -> Result<()> {
+ let manifest_dir = manifest.parent().unwrap();
+
+ // Simply invoke `cargo update`
+ let output = Command::new(cargo_bin)
+ // Cargo detects config files based on `pwd` when running so
+ // to ensure user provided Cargo config files are used, it's
+ // critical to set the working directory to the manifest dir.
+ .current_dir(manifest_dir)
+ .arg("update")
+ .arg("--manifest-path")
+ .arg(manifest)
+ .args(self.get_update_args())
+ .env("RUSTC", &rustc_bin)
+ .output()
+ .with_context(|| {
+ format!(
+ "Error running cargo to update packages for manifest '{}'",
+ manifest.display()
+ )
+ })?;
+
+ if !output.status.success() {
+ eprintln!("{}", String::from_utf8_lossy(&output.stdout));
+ eprintln!("{}", String::from_utf8_lossy(&output.stderr));
+ bail!(format!("Failed to update lockfile: {}", output.status))
+ }
+
+ Ok(())
+ }
+}
+
pub struct LockGenerator {
/// The path to a `cargo` binary
cargo_bin: PathBuf,
@@ -94,11 +188,12 @@
&self,
manifest_path: &Path,
existing_lock: &Option<PathBuf>,
+ update_request: &Option<CargoUpdateRequest>,
) -> Result<cargo_lock::Lockfile> {
let manifest_dir = manifest_path.parent().unwrap();
let generated_lockfile_path = manifest_dir.join("Cargo.lock");
- let output = if let Some(lock) = existing_lock {
+ if let Some(lock) = existing_lock {
if !lock.exists() {
bail!(
"An existing lockfile path was provided but a file at '{}' does not exist",
@@ -112,9 +207,13 @@
}
fs::copy(&lock, &generated_lockfile_path)?;
+ if let Some(request) = update_request {
+ request.update(manifest_path, &self.cargo_bin, &self.rustc_bin)?;
+ }
+
// Ensure the Cargo cache is up to date to simulate the behavior
// of having just generated a new one
- Command::new(&self.cargo_bin)
+ let output = Command::new(&self.cargo_bin)
// Cargo detects config files based on `pwd` when running so
// to ensure user provided Cargo config files are used, it's
// critical to set the working directory to the manifest dir.
@@ -128,10 +227,19 @@
.context(format!(
"Error running cargo to fetch crates '{}'",
manifest_path.display()
- ))?
+ ))?;
+
+ if !output.status.success() {
+ eprintln!("{}", String::from_utf8_lossy(&output.stdout));
+ eprintln!("{}", String::from_utf8_lossy(&output.stderr));
+ bail!(format!(
+ "Failed to fetch crates for lockfile: {}",
+ output.status
+ ))
+ }
} else {
// Simply invoke `cargo generate-lockfile`
- Command::new(&self.cargo_bin)
+ let output = Command::new(&self.cargo_bin)
// Cargo detects config files based on `pwd` when running so
// to ensure user provided Cargo config files are used, it's
// critical to set the working directory to the manifest dir.
@@ -144,13 +252,13 @@
.context(format!(
"Error running cargo to generate lockfile '{}'",
manifest_path.display()
- ))?
- };
+ ))?;
- if !output.status.success() {
- eprintln!("{}", String::from_utf8_lossy(&output.stdout));
- eprintln!("{}", String::from_utf8_lossy(&output.stderr));
- bail!(format!("Failed to generate lockfile: {}", output.status))
+ if !output.status.success() {
+ eprintln!("{}", String::from_utf8_lossy(&output.stdout));
+ eprintln!("{}", String::from_utf8_lossy(&output.stderr));
+ bail!(format!("Failed to generate lockfile: {}", output.status))
+ }
}
cargo_lock::Lockfile::load(&generated_lockfile_path).context(format!(
@@ -222,20 +330,76 @@
/// A helper function for deserializing Cargo metadata and lockfiles
pub fn load_metadata(
metadata_path: &Path,
- lockfile_path: Option<&Path>,
) -> Result<(cargo_metadata::Metadata, cargo_lock::Lockfile)> {
+ // Locate the Cargo.lock file related to the metadata file.
+ let lockfile_path = metadata_path
+ .parent()
+ .expect("metadata files should always have parents")
+ .join("Cargo.lock");
+ if !lockfile_path.exists() {
+ bail!(
+ "The metadata file at {} is not next to a `Cargo.lock` file.",
+ metadata_path.display()
+ )
+ }
+
let content = fs::read_to_string(metadata_path)
.with_context(|| format!("Failed to load Cargo Metadata: {}", metadata_path.display()))?;
let metadata =
serde_json::from_str(&content).context("Unable to deserialize Cargo metadata")?;
- let lockfile_path = lockfile_path
- .map(PathBuf::from)
- .unwrap_or_else(|| metadata_path.parent().unwrap().join("Cargo.lock"));
-
let lockfile = cargo_lock::Lockfile::load(&lockfile_path)
.with_context(|| format!("Failed to load lockfile: {}", lockfile_path.display()))?;
Ok((metadata, lockfile))
}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ #[test]
+ fn deserialize_cargo_update_request_for_eager() {
+ for value in ["1", "yes", "true", "on"] {
+ let request = CargoUpdateRequest::from_str(value).unwrap();
+
+ assert_eq!(request, CargoUpdateRequest::Eager);
+ }
+ }
+
+ #[test]
+ fn deserialize_cargo_update_request_for_workspace() {
+ for value in ["workspace", "minimal"] {
+ let request = CargoUpdateRequest::from_str(value).unwrap();
+
+ assert_eq!(request, CargoUpdateRequest::Workspace);
+ }
+ }
+
+ #[test]
+ fn deserialize_cargo_update_request_for_package() {
+ let request = CargoUpdateRequest::from_str("cargo-bazel").unwrap();
+
+ assert_eq!(
+ request,
+ CargoUpdateRequest::Package {
+ name: "cargo-bazel".to_owned(),
+ version: None
+ }
+ );
+ }
+
+ #[test]
+ fn deserialize_cargo_update_request_for_precise() {
+ let request = CargoUpdateRequest::from_str("cargo-bazel@1.2.3").unwrap();
+
+ assert_eq!(
+ request,
+ CargoUpdateRequest::Package {
+ name: "cargo-bazel".to_owned(),
+ version: Some("1.2.3".to_owned())
+ }
+ );
+ }
+}
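
The `CargoUpdateRequest` added above is what the new `--repin` flag (and the `CARGO_BAZEL_REPIN` environment variable) parses into, and `get_update_args` turns each variant into arguments for `cargo update`. A condensed, standalone re-statement of that mapping, with the type and function names shortened for illustration (they are not the crate's public API):

    // Condensed sketch of the string -> `cargo update` argument mapping.
    #[derive(Debug, PartialEq)]
    enum UpdateRequest {
        Eager,
        Workspace,
        Package { name: String, version: Option<String> },
    }

    fn parse(s: &str) -> UpdateRequest {
        let lower = s.to_lowercase();
        if ["1", "yes", "true", "on"].contains(&lower.as_str()) {
            return UpdateRequest::Eager;
        }
        if ["workspace", "minimal"].contains(&lower.as_str()) {
            return UpdateRequest::Workspace;
        }
        // Anything else is treated as `package` or `package@version`.
        let mut split = s.splitn(2, '@');
        UpdateRequest::Package {
            name: split.next().unwrap().to_owned(),
            version: split.next().map(str::to_owned),
        }
    }

    fn update_args(req: &UpdateRequest) -> Vec<String> {
        match req {
            UpdateRequest::Eager => Vec::new(),
            UpdateRequest::Workspace => vec!["--workspace".to_owned()],
            UpdateRequest::Package { name, version } => {
                let mut args = vec!["--package".to_owned(), name.clone()];
                if let Some(v) = version {
                    args.push("--precise".to_owned());
                    args.push(v.clone());
                }
                args
            }
        }
    }

    fn main() {
        assert_eq!(update_args(&parse("true")), Vec::<String>::new());
        assert_eq!(update_args(&parse("workspace")), vec!["--workspace"]);
        assert_eq!(
            update_args(&parse("rand@0.8.5")),
            vec!["--package", "rand", "--precise", "0.8.5"]
        );
    }

In other words, `CARGO_BAZEL_REPIN=true` requests an unrestricted `cargo update`, `workspace`/`minimal` maps to `--workspace`, and `package@version` maps to `--package package --precise version`.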
diff --git a/crate_universe/src/metadata/dependency.rs b/crate_universe/src/metadata/dependency.rs
index 105e4fe..7a98ae2 100644
--- a/crate_universe/src/metadata/dependency.rs
+++ b/crate_universe/src/metadata/dependency.rs
@@ -1,4 +1,5 @@
///! Gathering dependencies is the largest part of annotating.
+use anyhow::{bail, Result};
use cargo_metadata::{Metadata as CargoMetadata, Node, NodeDep, Package, PackageId};
use serde::{Deserialize, Serialize};
@@ -120,7 +121,8 @@
for dep in deps.into_iter() {
let dep_pkg = &metadata[&dep.pkg];
- let target_name = get_library_target_name(dep_pkg, &dep.name);
+ let target_name = get_library_target_name(dep_pkg, &dep.name)
+ .expect("Node dependencies are expected to exclusively be library-like targets");
let alias = get_target_alias(&dep.name, dep_pkg);
for kind_info in &dep.dep_kinds {
@@ -190,25 +192,35 @@
.any(|id| id == &node_dep.pkg)
}
-fn get_library_target_name(package: &Package, potential_name: &str) -> String {
+fn get_library_target_name(package: &Package, potential_name: &str) -> Result<String> {
// If the potential name is not an alias in a dependent's package, a target's name
// should match which means we already know what the target library name is.
if package.targets.iter().any(|t| t.name == potential_name) {
- return potential_name.to_string();
+ return Ok(potential_name.to_string());
}
// Locate any library type targets
let lib_targets: Vec<&cargo_metadata::Target> = package
.targets
.iter()
- .filter(|t| t.kind.iter().any(|k| k == "lib" || k == "proc-macro"))
+ .filter(|t| {
+ t.kind
+ .iter()
+ .any(|k| k == "lib" || k == "rlib" || k == "proc-macro")
+ })
.collect();
// Only one target should be found
- assert_eq!(lib_targets.len(), 1);
+ if lib_targets.len() != 1 {
+ bail!(
+ "Unexpected number of 'library-like' targets found for {}: {:?}",
+ package.name,
+ package.targets
+ )
+ }
let target = lib_targets.into_iter().last().unwrap();
- target.name.clone()
+ Ok(target.name.clone())
}
/// The resolve graph (resolve.nodes[#].deps[#].name) of Cargo metadata uses module names
@@ -235,6 +247,124 @@
use crate::test::*;
+ #[test]
+ fn get_expected_lib_target_name() {
+ let mut package = mock_cargo_metadata_package();
+ package
+ .targets
+ .extend(vec![serde_json::from_value(serde_json::json!({
+ "name": "potential",
+ "kind": ["lib"],
+ "crate_types": [],
+ "required_features": [],
+ "src_path": "/tmp/mock.rs",
+ "edition": "2021",
+ "doctest": false,
+ "test": false,
+ "doc": false,
+ }))
+ .unwrap()]);
+
+ assert_eq!(
+ get_library_target_name(&package, "potential").unwrap(),
+ "potential"
+ );
+ }
+
+ #[test]
+ fn get_lib_target_name() {
+ let mut package = mock_cargo_metadata_package();
+ package
+ .targets
+ .extend(vec![serde_json::from_value(serde_json::json!({
+ "name": "lib_target",
+ "kind": ["lib"],
+ "crate_types": [],
+ "required_features": [],
+ "src_path": "/tmp/mock.rs",
+ "edition": "2021",
+ "doctest": false,
+ "test": false,
+ "doc": false,
+ }))
+ .unwrap()]);
+
+ assert_eq!(
+ get_library_target_name(&package, "mock-pkg").unwrap(),
+ "lib_target"
+ );
+ }
+
+ #[test]
+ fn get_rlib_target_name() {
+ let mut package = mock_cargo_metadata_package();
+ package
+ .targets
+ .extend(vec![serde_json::from_value(serde_json::json!({
+ "name": "rlib_target",
+ "kind": ["rlib"],
+ "crate_types": [],
+ "required_features": [],
+ "src_path": "/tmp/mock.rs",
+ "edition": "2021",
+ "doctest": false,
+ "test": false,
+ "doc": false,
+ }))
+ .unwrap()]);
+
+ assert_eq!(
+ get_library_target_name(&package, "mock-pkg").unwrap(),
+ "rlib_target"
+ );
+ }
+
+ #[test]
+ fn get_proc_macro_target_name() {
+ let mut package = mock_cargo_metadata_package();
+ package
+ .targets
+ .extend(vec![serde_json::from_value(serde_json::json!({
+ "name": "proc_macro_target",
+ "kind": ["proc-macro"],
+ "crate_types": [],
+ "required_features": [],
+ "src_path": "/tmp/mock.rs",
+ "edition": "2021",
+ "doctest": false,
+ "test": false,
+ "doc": false,
+ }))
+ .unwrap()]);
+
+ assert_eq!(
+ get_library_target_name(&package, "mock-pkg").unwrap(),
+ "proc_macro_target"
+ );
+ }
+
+ #[test]
+ fn get_bin_target_name() {
+ let mut package = mock_cargo_metadata_package();
+ package
+ .targets
+ .extend(vec![serde_json::from_value(serde_json::json!({
+ "name": "bin_target",
+ "kind": ["bin"],
+ "crate_types": [],
+ "required_features": [],
+ "src_path": "/tmp/mock.rs",
+ "edition": "2021",
+ "doctest": false,
+ "test": false,
+ "doc": false,
+ }))
+ .unwrap()]);
+
+ // It's an error for no library target to be found.
+ assert!(get_library_target_name(&package, "mock-pkg").is_err());
+ }
+
/// Locate the [cargo_metadata::Node] for the crate matching the given name
fn find_metadata_node<'a>(
name: &str,
diff --git a/crate_universe/src/metadata/metadata_annotation.rs b/crate_universe/src/metadata/metadata_annotation.rs
index be4cb7f..c045519 100644
--- a/crate_universe/src/metadata/metadata_annotation.rs
+++ b/crate_universe/src/metadata/metadata_annotation.rs
@@ -156,10 +156,11 @@
},
}
-/// TODO
+/// Additional information related to [Cargo.lock](https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html)
+/// data used for improved determinism.
#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Deserialize, Serialize)]
pub struct LockfileAnnotation {
- /// TODO
+ /// A mapping of crates/packages to additional source (network location) information.
pub crates: BTreeMap<PackageId, SourceAnnotation>,
}
diff --git a/crate_universe/src/rendering.rs b/crate_universe/src/rendering.rs
index a0570ba..c4cc249 100644
--- a/crate_universe/src/rendering.rs
+++ b/crate_universe/src/rendering.rs
@@ -211,13 +211,14 @@
use crate::config::{Config, CrateId, VendorMode};
use crate::context::crate_context::{CrateContext, Rule};
- use crate::context::{BuildScriptAttributes, Context, TargetAttributes};
+ use crate::context::{BuildScriptAttributes, CommonAttributes, Context, TargetAttributes};
use crate::metadata::Annotations;
use crate::test;
fn mock_render_config() -> RenderConfig {
serde_json::from_value(serde_json::json!({
- "repository_name": "test_rendering"
+ "repository_name": "test_rendering",
+ "regen_command": "cargo_bazel_regen_command",
}))
.unwrap()
}
@@ -467,4 +468,54 @@
// Local vendoring does not produce a `crates.bzl` file.
assert!(output.get(&PathBuf::from("crates.bzl")).is_none());
}
+
+ #[test]
+ fn duplicate_rustc_flags() {
+ let mut context = Context::default();
+ let crate_id = CrateId::new("mock_crate".to_owned(), "0.1.0".to_owned());
+
+ let rustc_flags = vec![
+ "-l".to_owned(),
+ "dylib=ssl".to_owned(),
+ "-l".to_owned(),
+ "dylib=crypto".to_owned(),
+ ];
+
+ context.crates.insert(
+ crate_id.clone(),
+ CrateContext {
+ name: crate_id.name,
+ version: crate_id.version,
+ targets: vec![Rule::Library(mock_target_attributes())],
+ common_attrs: CommonAttributes {
+ rustc_flags: rustc_flags.clone(),
+ ..CommonAttributes::default()
+ },
+ ..CrateContext::default()
+ },
+ );
+
+ // Enable local vendor mode
+ let config = RenderConfig {
+ vendor_mode: Some(VendorMode::Local),
+ ..mock_render_config()
+ };
+
+ let renderer = Renderer::new(config);
+ let output = renderer.render(&context).unwrap();
+
+ let build_file_content = output
+ .get(&PathBuf::from("BUILD.mock_crate-0.1.0.bazel"))
+ .unwrap();
+
+ // Strip all spaces from the generated BUILD file and ensure it has the flags
+ // represented by `rustc_flags` in the same order.
+ assert!(build_file_content.replace(' ', "").contains(
+ &rustc_flags
+ .iter()
+ .map(|s| format!("\"{}\",", s))
+ .collect::<Vec<String>>()
+ .join("\n")
+ ));
+ }
}
diff --git a/crate_universe/src/rendering/template_engine.rs b/crate_universe/src/rendering/template_engine.rs
index 792802f..3785b34 100644
--- a/crate_universe/src/rendering/template_engine.rs
+++ b/crate_universe/src/rendering/template_engine.rs
@@ -194,6 +194,7 @@
context.insert("default_select_dict", &SelectStringDict::default());
context.insert("repository_name", &render_config.repository_name);
context.insert("vendor_mode", &render_config.vendor_mode);
+ context.insert("regen_command", &render_config.regen_command);
context.insert("Null", &tera::Value::Null);
context.insert(
"default_package_name",
diff --git a/crate_universe/src/rendering/templates/module_bzl.j2 b/crate_universe/src/rendering/templates/module_bzl.j2
index 4ea1624..c3a1e6d 100644
--- a/crate_universe/src/rendering/templates/module_bzl.j2
+++ b/crate_universe/src/rendering/templates/module_bzl.j2
@@ -195,7 +195,10 @@
dependencies = _flatten_dependency_maps(all_dependency_maps).pop(package_name, None)
if not dependencies:
- return []
+ if dependencies == None:
+ fail("Tried to get all_crate_deps for package " + package_name + " but that package had no Cargo.toml file")
+ else:
+ return []
crate_deps = list(dependencies.pop(_COMMON_CONDITION, {}).values())
for condition, deps in dependencies.items():
diff --git a/crate_universe/src/rendering/templates/partials/crate/build_script.j2 b/crate_universe/src/rendering/templates/partials/crate/build_script.j2
index 45b97f7..9b1ff45 100644
--- a/crate_universe/src/rendering/templates/partials/crate/build_script.j2
+++ b/crate_universe/src/rendering/templates/partials/crate/build_script.j2
@@ -39,7 +39,14 @@
# warnings. For more details see:
# https://doc.rust-lang.org/rustc/lints/levels.html
"--cap-lints=allow",
- ] + {% set selectable = crate.build_script_attrs | get(key="rustc_flags", default=Null) %}{% include "partials/starlark/selectable_list.j2" %},
+ {%- if crate.common_attrs | get(key="rustc_flags", default=Null) %}
+
+ # User provided rustc_flags
+ {%- for rustc_flag in crate.common_attrs.rustc_flags %}
+ "{{ rustc_flag }}",
+ {%- endfor %}
+ {%- endif %}
+ ],
srcs = {% set glob = target.srcs %}{% include "partials/starlark/glob.j2" -%},
tools = {% set selectable = crate.build_script_attrs | get(key="tools", default=Null) %}{% include "partials/starlark/selectable_list.j2" %},
version = "{{ crate.common_attrs.version }}",
@@ -54,6 +61,13 @@
"noclippy",
"norustfmt",
],
+ {%- if crate.build_script_attrs | get(key="toolchains", default=Null) %}
+ toolchains = [
+ {%- for toolchain in crate.build_script_attrs.toolchains %}
+ "{{ toolchain }}",
+ {%- endfor %}
+ ],
+ {%- endif %}
visibility = ["//visibility:private"],
)
alias(
diff --git a/crate_universe/src/rendering/templates/partials/crate/common_attrs.j2 b/crate_universe/src/rendering/templates/partials/crate/common_attrs.j2
index a381f44..c1cccbb 100644
--- a/crate_universe/src/rendering/templates/partials/crate/common_attrs.j2
+++ b/crate_universe/src/rendering/templates/partials/crate/common_attrs.j2
@@ -1,11 +1,11 @@
- compile_data = {% if crate.common_attrs | get(key="compile_data_glob") %}glob({{ crate.common_attrs.compile_data_glob | json_encode | safe }}) + {% endif %}{% set selectable = crate.common_attrs | get(key="compile_data", default=default_select_list) %}{% include "partials/starlark/selectable_list.j2" -%},
+ compile_data = {% if crate.common_attrs | get(key="compile_data_glob") %}glob(include = {{ crate.common_attrs.compile_data_glob | json_encode | safe }}, exclude = ["BUILD", "BUILD.bazel", "WORKSPACE", "WORKSPACE.bazel"]) + {% endif %}{% set selectable = crate.common_attrs | get(key="compile_data", default=default_select_list) %}{% include "partials/starlark/selectable_list.j2" -%},
crate_root = "{{ target.crate_root }}",
crate_features = [
{%- for feature in crate.common_attrs | get(key="crate_features", default=[]) %}
"{{ feature }}",
{%- endfor %}
],
- data = {% if crate.common_attrs | get(key="data_glob") %}glob({{ crate.common_attrs.data_glob | json_encode | safe }}) + {% endif %}{% set selectable = crate.common_attrs | get(key="data", default=default_select_list) %}{% include "partials/starlark/selectable_list.j2" -%},
+ data = {% if crate.common_attrs | get(key="data_glob") %}glob(include = {{ crate.common_attrs.data_glob | json_encode | safe }}, exclude = ["BUILD", "BUILD.bazel", "WORKSPACE", "WORKSPACE.bazel"]) + {% endif %}{% set selectable = crate.common_attrs | get(key="data", default=default_select_list) %}{% include "partials/starlark/selectable_list.j2" -%},
edition = "{{ crate.common_attrs.edition }}",
{%- if crate.common_attrs | get(key="linker_script", default=Null) %}
linker_script = "{{ crate.common_attrs.linker_script }}",
@@ -18,7 +18,14 @@
# warnings. For more details see:
# https://doc.rust-lang.org/rustc/lints/levels.html
"--cap-lints=allow",
- ] + {% set selectable = crate.common_attrs | get(key="rustc_flags", default=Null) %}{% include "partials/starlark/selectable_list.j2" -%},
+ {%- if crate.common_attrs | get(key="rustc_flags", default=Null) %}
+
+ # User provided rustc_flags
+ {%- for rustc_flag in crate.common_attrs.rustc_flags %}
+ "{{ rustc_flag }}",
+ {%- endfor %}
+ {%- endif %}
+ ],
srcs = {% set glob = target.srcs %}{% include "partials/starlark/glob.j2" -%},
version = "{{ crate.common_attrs.version }}",
tags = [
diff --git a/crate_universe/src/rendering/templates/partials/header.j2 b/crate_universe/src/rendering/templates/partials/header.j2
index 6f88e85..8e68e49 100644
--- a/crate_universe/src/rendering/templates/partials/header.j2
+++ b/crate_universe/src/rendering/templates/partials/header.j2
@@ -1,6 +1,7 @@
###############################################################################
# @generated
-# This file is auto-generated by the cargo-bazel tool.
+# DO NOT MODIFY: This file is auto-generated by a crate_universe tool. To
+# regenerate this file, run the following:
#
-# DO NOT MODIFY: Local changes may be replaced in future executions.
+# {{regen_command}}
###############################################################################
\ No newline at end of file
diff --git a/crate_universe/src/splicing.rs b/crate_universe/src/splicing.rs
index 0de1daa..8d42e1e 100644
--- a/crate_universe/src/splicing.rs
+++ b/crate_universe/src/splicing.rs
@@ -15,26 +15,15 @@
use serde::{Deserialize, Serialize};
use crate::config::CrateId;
-use crate::metadata::LockGenerator;
+use crate::metadata::{CargoUpdateRequest, LockGenerator};
use crate::utils::starlark::Label;
use self::cargo_config::CargoConfig;
pub use self::splicer::*;
-#[derive(Debug, Default, Serialize, Deserialize)]
-pub struct ExtraManifestInfo {
- // The path to a Cargo Manifest
- pub manifest: PathBuf,
-
- // The URL where the manifest's package can be downloaded
- pub url: String,
-
- // The Sha256 checksum of the downloaded package located at `url`.
- pub sha256: String,
-}
-
type DirectPackageManifest = BTreeMap<String, cargo_toml::DependencyDetail>;
+/// A collection of information used for splicing together a new Cargo manifest.
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
#[serde(deny_unknown_fields)]
pub struct SplicingManifest {
@@ -65,33 +54,35 @@
Self::from_str(&content).context("Failed to load SplicingManifest")
}
- pub fn absoulutize(self, relative_to: &Path) -> Self {
+ pub fn resolve(self, workspace_dir: &Path, output_base: &Path) -> Self {
let Self {
manifests,
cargo_config,
..
} = self;
+ let workspace_dir_str = workspace_dir.to_string_lossy();
+ let output_base_str = output_base.to_string_lossy();
+
// Ensure manifests all have absolute paths
let manifests = manifests
.into_iter()
.map(|(path, label)| {
- if !path.is_absolute() {
- let path = relative_to.join(path);
- (path, label)
- } else {
- (path, label)
- }
+ let resolved_path = path
+ .to_string_lossy()
+ .replace("${build_workspace_directory}", &workspace_dir_str)
+ .replace("${output_base}", &output_base_str);
+ (PathBuf::from(resolved_path), label)
})
.collect();
// Ensure the cargo config is located at an absolute path
let cargo_config = cargo_config.map(|path| {
- if !path.is_absolute() {
- relative_to.join(path)
- } else {
- path
- }
+ let resolved_path = path
+ .to_string_lossy()
+ .replace("${build_workspace_directory}", &workspace_dir_str)
+ .replace("${output_base}", &output_base_str);
+ PathBuf::from(resolved_path)
});
Self {
@@ -102,6 +93,7 @@
}
}
+/// The result of fully resolving a [SplicingManifest] in preparation for splicing.
#[derive(Debug, Serialize, Default)]
pub struct SplicingMetadata {
/// A set of all packages directly written to the rule
@@ -147,32 +139,6 @@
}
}
-/// A collection of information required for reproducible "extra worksspace members".
-#[derive(Debug, Default, Serialize, Deserialize)]
-#[serde(deny_unknown_fields)]
-pub struct ExtraManifestsManifest {
- pub manifests: Vec<ExtraManifestInfo>,
-}
-
-impl FromStr for ExtraManifestsManifest {
- type Err = serde_json::Error;
-
- fn from_str(s: &str) -> Result<Self, Self::Err> {
- serde_json::from_str(s)
- }
-}
-
-impl ExtraManifestsManifest {
- pub fn try_from_path<T: AsRef<Path>>(path: T) -> Result<Self> {
- let content = fs::read_to_string(path.as_ref())?;
- Self::from_str(&content).context("Failed to load ExtraManifestsManifest")
- }
-
- pub fn absoulutize(self) -> Self {
- self
- }
-}
-
#[derive(Debug, Default, Serialize, Deserialize, Clone)]
pub struct SourceInfo {
/// A url where to a `.crate` file.
@@ -227,30 +193,9 @@
impl WorkspaceMetadata {
fn new(
splicing_manifest: &SplicingManifest,
- extra_manifests_manifest: &ExtraManifestsManifest,
- injected_manifests: HashMap<&PathBuf, String>,
+ member_manifests: HashMap<&PathBuf, String>,
) -> Result<Self> {
- let mut sources = BTreeMap::new();
-
- for config in extra_manifests_manifest.manifests.iter() {
- let package = match read_manifest(&config.manifest) {
- Ok(manifest) => match manifest.package {
- Some(pkg) => pkg,
- None => continue,
- },
- Err(e) => return Err(e),
- };
-
- let id = CrateId::new(package.name, package.version);
- let info = SourceInfo {
- url: config.url.clone(),
- sha256: config.sha256.clone(),
- };
-
- sources.insert(id, info);
- }
-
- let mut package_prefixes: BTreeMap<String, String> = injected_manifests
+ let mut package_prefixes: BTreeMap<String, String> = member_manifests
.iter()
.filter_map(|(original_manifest, cargo_pkg_name)| {
let label = match splicing_manifest.manifests.get(*original_manifest) {
@@ -285,7 +230,7 @@
.collect();
Ok(Self {
- sources,
+ sources: BTreeMap::new(),
workspace_prefix,
package_prefixes,
})
@@ -469,6 +414,7 @@
existing_lock: &Option<PathBuf>,
cargo_bin: &Path,
rustc_bin: &Path,
+ update_request: &Option<CargoUpdateRequest>,
) -> Result<cargo_lock::Lockfile> {
let manifest_dir = manifest_path
.as_path_buf()
@@ -484,7 +430,7 @@
// Generate the new lockfile
let lockfile = LockGenerator::new(PathBuf::from(cargo_bin), PathBuf::from(rustc_bin))
- .generate(manifest_path.as_path_buf(), existing_lock)?;
+ .generate(manifest_path.as_path_buf(), existing_lock, update_request)?;
// Write the lockfile to disk
if !root_lockfile_path.exists() {
@@ -493,3 +439,107 @@
Ok(lockfile)
}
+
+#[cfg(test)]
+mod test {
+ use super::*;
+
+ use std::path::PathBuf;
+
+ #[test]
+ fn deserialize_splicing_manifest() {
+ let runfiles = runfiles::Runfiles::create().unwrap();
+ let path = runfiles.rlocation(
+ "rules_rust/crate_universe/test_data/serialized_configs/splicing_manifest.json",
+ );
+
+ let content = std::fs::read_to_string(path).unwrap();
+
+ let manifest: SplicingManifest = serde_json::from_str(&content).unwrap();
+
+ // Check manifests
+ assert_eq!(
+ manifest.manifests,
+ BTreeMap::from([
+ (
+ PathBuf::from("${build_workspace_directory}/submod/Cargo.toml"),
+ Label::from_str("//submod:Cargo.toml").unwrap()
+ ),
+ (
+ PathBuf::from("${output_base}/external_crate/Cargo.toml"),
+ Label::from_str("@external_crate//:Cargo.toml").unwrap()
+ ),
+ (
+ PathBuf::from("/tmp/abs/path/workspace/Cargo.toml"),
+ Label::from_str("//:Cargo.toml").unwrap()
+ ),
+ ])
+ );
+
+ // Check splicing configs
+ assert_eq!(manifest.resolver_version, cargo_toml::Resolver::V2);
+
+ // Check packages
+ assert_eq!(manifest.direct_packages.len(), 1);
+ let package = manifest.direct_packages.get("rand").unwrap();
+ assert_eq!(
+ package,
+ &cargo_toml::DependencyDetail {
+ default_features: Some(false),
+ features: vec!["small_rng".to_owned()],
+ version: Some("0.8.5".to_owned()),
+ ..Default::default()
+ }
+ );
+
+ // Check cargo config
+ assert_eq!(
+ manifest.cargo_config,
+ Some(PathBuf::from("/tmp/abs/path/workspace/.cargo/config.toml"))
+ );
+ }
+
+ #[test]
+ fn splicing_manifest_resolve() {
+ let runfiles = runfiles::Runfiles::create().unwrap();
+ let path = runfiles.rlocation(
+ "rules_rust/crate_universe/test_data/serialized_configs/splicing_manifest.json",
+ );
+
+ let content = std::fs::read_to_string(path).unwrap();
+
+ let mut manifest: SplicingManifest = serde_json::from_str(&content).unwrap();
+ manifest.cargo_config = Some(PathBuf::from(
+ "${build_workspace_directory}/.cargo/config.toml",
+ ));
+ manifest = manifest.resolve(
+ &PathBuf::from("/tmp/abs/path/workspace"),
+ &PathBuf::from("/tmp/output_base"),
+ );
+
+ // Check manifests
+ assert_eq!(
+ manifest.manifests,
+ BTreeMap::from([
+ (
+ PathBuf::from("/tmp/abs/path/workspace/submod/Cargo.toml"),
+ Label::from_str("//submod:Cargo.toml").unwrap()
+ ),
+ (
+ PathBuf::from("/tmp/output_base/external_crate/Cargo.toml"),
+ Label::from_str("@external_crate//:Cargo.toml").unwrap()
+ ),
+ (
+ PathBuf::from("/tmp/abs/path/workspace/Cargo.toml"),
+ Label::from_str("//:Cargo.toml").unwrap()
+ ),
+ ])
+ );
+
+ // Check cargo config
+ assert_eq!(
+ manifest.cargo_config.unwrap(),
+ PathBuf::from("/tmp/abs/path/workspace/.cargo/config.toml"),
+ )
+ }
+}
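
A minimal usage sketch of the new `SplicingManifest::resolve` signature exercised by the
tests above; `resolve_example` and the paths it passes are illustrative placeholders:

    use std::path::Path;

    use crate::splicing::SplicingManifest;

    // Illustrative only: expands the `${build_workspace_directory}` and `${output_base}`
    // placeholders in the tracked manifest paths and the optional cargo config path.
    fn resolve_example(manifest: SplicingManifest) -> SplicingManifest {
        manifest.resolve(
            Path::new("/home/user/workspace"),
            Path::new("/home/user/.cache/bazel/output_base"),
        )
    }
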
diff --git a/crate_universe/src/splicing/splicer.rs b/crate_universe/src/splicing/splicer.rs
index 5e3ef27..8374df4 100644
--- a/crate_universe/src/splicing/splicer.rs
+++ b/crate_universe/src/splicing/splicer.rs
@@ -5,16 +5,15 @@
use std::path::{Path, PathBuf};
use anyhow::{bail, Context, Result};
+use cargo_metadata::MetadataCommand;
use cargo_toml::{Dependency, Manifest};
+use normpath::PathExt;
use crate::config::CrateId;
use crate::splicing::{SplicedManifest, SplicingManifest};
use crate::utils::starlark::Label;
-use super::{
- read_manifest, DirectPackageManifest, ExtraManifestInfo, ExtraManifestsManifest,
- WorkspaceMetadata,
-};
+use super::{read_manifest, DirectPackageManifest, WorkspaceMetadata};
/// The core splicer implementation. Each style of Bazel workspace should be represented
/// here and a splicing implementation defined.
@@ -24,7 +23,6 @@
path: &'a PathBuf,
manifest: &'a Manifest,
splicing_manifest: &'a SplicingManifest,
- extra_manifests_manifest: &'a ExtraManifestsManifest,
},
/// Splice a manifest for a single package. This includes cases where
/// packages were defined directly in Bazel.
@@ -32,13 +30,11 @@
path: &'a PathBuf,
manifest: &'a Manifest,
splicing_manifest: &'a SplicingManifest,
- extra_manifests_manifest: &'a ExtraManifestsManifest,
},
/// Splice a manifest from multiple disjoint Cargo manifests.
MultiPackage {
manifests: &'a HashMap<PathBuf, Manifest>,
splicing_manifest: &'a SplicingManifest,
- extra_manifests_manifest: &'a ExtraManifestsManifest,
},
}
@@ -49,7 +45,7 @@
pub fn new(
manifests: &'a HashMap<PathBuf, Manifest>,
splicing_manifest: &'a SplicingManifest,
- extra_manifests_manifest: &'a ExtraManifestsManifest,
+ cargo: &Path,
) -> Result<Self> {
// First check for any workspaces in the provided manifests
let workspace_owned: HashMap<&PathBuf, &Manifest> = manifests
@@ -73,7 +69,33 @@
bail!("When splicing manifests, there can only be 1 root workspace manifest");
}
+ // This is an error case: we've detected that some manifests are members of a
+ // workspace, but we can't find the root workspace manifest among them. This block
+ // just tries to give as useful an error message as possible in that case.
+ if workspace_roots.is_empty() {
+ let sorted_manifests: BTreeSet<_> = manifests.keys().collect();
+ for manifest_path in sorted_manifests {
+ let metadata_result = MetadataCommand::new()
+ .cargo_path(cargo)
+ .current_dir(manifest_path.parent().unwrap())
+ .manifest_path(manifest_path)
+ .no_deps()
+ .exec();
+ if let Ok(metadata) = metadata_result {
+ let label = Label::from_absolute_path(
+ metadata.workspace_root.join("Cargo.toml").as_std_path(),
+ );
+ if let Ok(label) = label {
+ bail!("Missing root workspace manifest. Please add the following label to the `manifests` key: \"{}\"", label);
+ }
+ }
+ }
+ bail!("Missing root workspace manifest. Please add the label of the workspace root to the `manifests` key");
+ }
+
// Ensure all workspace owned manifests are members of the one workspace root
+ // UNWRAP: Safe because we've checked workspace_roots isn't empty.
let (root_manifest_path, root_manifest) = workspace_roots.drain().last().unwrap();
let external_workspace_members: BTreeSet<String> = workspace_packages
.into_iter()
@@ -87,38 +109,22 @@
bail!("A package was provided that appears to be a part of another workspace.\nworkspace root: '{}'\nexternal packages: {:#?}", root_manifest_path.display(), external_workspace_members)
}
- // Ensure all workspace members are present for the given workspace
- let workspace_members = root_manifest.workspace.as_ref().unwrap().members.clone();
- let missing_manifests: BTreeSet<String> = workspace_members
- .into_iter()
- .filter(|member| {
- // Check for any members that are missing from the list of manifests
- !manifests.keys().any(|path| {
- let path_str = path.to_string_lossy().to_string();
- // Account for windows paths.
- let path_str = path_str.replace("\\", "/");
- // Workspace members are represented as directories.
- path_str.trim_end_matches("/Cargo.toml").ends_with(member)
+ // UNWRAP: Safe because a Cargo.toml file must have a parent directory.
+ let root_manifest_dir = root_manifest_path.parent().unwrap();
+ let missing_manifests = Self::find_missing_manifests(
+ root_manifest,
+ root_manifest_dir,
+ &manifests
+ .keys()
+ .map(|p| {
+ p.normalize()
+ .with_context(|| format!("Failed to normalize path {:?}", p))
})
- })
- .filter_map(|path_str| {
- // UNWRAP: Safe because a Cargo.toml file must have a parent directory.
- let cargo_manifest_dir = root_manifest_path.parent().unwrap();
- let label = Label::from_absolute_path(
- &cargo_manifest_dir.join(path_str).join("Cargo.toml"),
- );
- match label {
- Ok(label) => Some(label.to_string()),
- Err(err) => {
- eprintln!("Failed to identify label for missing manifest: {}", err);
- None
- }
- }
- })
- .collect();
-
+ .collect::<Result<_, _>>()?,
+ )
+ .context("Identifying missing manifests")?;
if !missing_manifests.is_empty() {
- bail!("Some manifests are not being tracked. Please add the following labels to the `manifests` key: {:#?}", missing_manifests)
+ bail!("Some manifests are not being tracked. Please add the following labels to the `manifests` key: {:#?}", missing_manifests);
}
root_workspace_pair = Some((root_manifest_path, root_manifest));
@@ -129,7 +135,6 @@
path,
manifest,
splicing_manifest,
- extra_manifests_manifest,
})
} else if manifests.len() == 1 {
let (path, manifest) = manifests.iter().last().unwrap();
@@ -137,17 +142,52 @@
path,
manifest,
splicing_manifest,
- extra_manifests_manifest,
})
} else {
Ok(Self::MultiPackage {
manifests,
splicing_manifest,
- extra_manifests_manifest,
})
}
}
+ fn find_missing_manifests(
+ root_manifest: &Manifest,
+ root_manifest_dir: &Path,
+ known_manifest_paths: &BTreeSet<normpath::BasePathBuf>,
+ ) -> Result<BTreeSet<String>> {
+ let workspace_manifest_paths = root_manifest
+ .workspace
+ .as_ref()
+ .unwrap()
+ .members
+ .iter()
+ .map(|member| {
+ let path = root_manifest_dir.join(member).join("Cargo.toml");
+ path.normalize()
+ .with_context(|| format!("Failed to normalize path {:?}", path))
+ })
+ .collect::<Result<BTreeSet<normpath::BasePathBuf>, _>>()?;
+
+ // Ensure all workspace members are present for the given workspace
+ workspace_manifest_paths
+ .into_iter()
+ .filter(|workspace_manifest_path| {
+ !known_manifest_paths.contains(workspace_manifest_path)
+ })
+ .map(|workspace_manifest_path| {
+ let label = Label::from_absolute_path(workspace_manifest_path.as_path())
+ .with_context(|| {
+ format!(
+ "Failed to identify label for path {:?}",
+ workspace_manifest_path
+ )
+ })?;
+ Ok(label.to_string())
+ })
+ .collect()
+ }
+
/// Performs splicing based on the current variant.
pub fn splice(&self, workspace_dir: &Path) -> Result<SplicedManifest> {
match self {
@@ -155,76 +195,45 @@
path,
manifest,
splicing_manifest,
- extra_manifests_manifest,
- } => Self::splice_workspace(
- workspace_dir,
- path,
- manifest,
- splicing_manifest,
- extra_manifests_manifest,
- ),
+ } => Self::splice_workspace(workspace_dir, path, manifest, splicing_manifest),
SplicerKind::Package {
path,
manifest,
splicing_manifest,
- extra_manifests_manifest,
- } => Self::splice_package(
- workspace_dir,
- path,
- manifest,
- splicing_manifest,
- extra_manifests_manifest,
- ),
+ } => Self::splice_package(workspace_dir, path, manifest, splicing_manifest),
SplicerKind::MultiPackage {
manifests,
splicing_manifest,
- extra_manifests_manifest,
- } => Self::splice_multi_package(
- workspace_dir,
- manifests,
- splicing_manifest,
- extra_manifests_manifest,
- ),
+ } => Self::splice_multi_package(workspace_dir, manifests, splicing_manifest),
}
}
+ /// Implementation for splicing Cargo workspaces
fn splice_workspace(
workspace_dir: &Path,
path: &&PathBuf,
manifest: &&Manifest,
splicing_manifest: &&SplicingManifest,
- extra_manifests_manifest: &&ExtraManifestsManifest,
) -> Result<SplicedManifest> {
let mut manifest = (*manifest).clone();
let manifest_dir = path
.parent()
.expect("Every manifest should havee a parent directory");
- let extra_workspace_manifests =
- Self::get_extra_workspace_manifests(&extra_manifests_manifest.manifests)?;
-
// Link the sources of the root manifest into the new workspace
symlink_roots(manifest_dir, workspace_dir, Some(IGNORE_LIST))?;
// Optionally install the cargo config after contents have been symlinked
Self::setup_cargo_config(&splicing_manifest.cargo_config, workspace_dir)?;
- // Add additional workspace members to the new manifest
- let mut installations = Self::inject_workspace_members(
- &mut manifest,
- &extra_workspace_manifests,
- workspace_dir,
- )?;
-
// Add any additional dependencies to the root package
Self::inject_direct_packages(&mut manifest, &splicing_manifest.direct_packages)?;
let root_manifest_path = workspace_dir.join("Cargo.toml");
- installations.insert(path, String::new());
+ let member_manifests = HashMap::from([(*path, String::new())]);
// Write the generated metadata to the manifest
- let workspace_metadata =
- WorkspaceMetadata::new(splicing_manifest, extra_manifests_manifest, installations)?;
+ let workspace_metadata = WorkspaceMetadata::new(splicing_manifest, member_manifests)?;
workspace_metadata.inject_into(&mut manifest)?;
// Write the root manifest
@@ -233,20 +242,17 @@
Ok(SplicedManifest::Workspace(root_manifest_path))
}
+ /// Implementation for splicing individual Cargo packages
fn splice_package(
workspace_dir: &Path,
path: &&PathBuf,
manifest: &&Manifest,
splicing_manifest: &&SplicingManifest,
- extra_manifests_manifest: &&ExtraManifestsManifest,
) -> Result<SplicedManifest> {
let manifest_dir = path
.parent()
.expect("Every manifest should havee a parent directory");
- let extra_workspace_manifests =
- Self::get_extra_workspace_manifests(&extra_manifests_manifest.manifests)?;
-
// Link the sources of the root manifest into the new workspace
symlink_roots(manifest_dir, workspace_dir, Some(IGNORE_LIST))?;
@@ -260,22 +266,14 @@
default_cargo_workspace_manifest(&splicing_manifest.resolver_version).workspace
}
- // Add additional workspace members to the new manifest
- let mut installations = Self::inject_workspace_members(
- &mut manifest,
- &extra_workspace_manifests,
- workspace_dir,
- )?;
-
// Add any additional dependencies to the root package
Self::inject_direct_packages(&mut manifest, &splicing_manifest.direct_packages)?;
let root_manifest_path = workspace_dir.join("Cargo.toml");
- installations.insert(path, String::new());
+ let member_manifests = HashMap::from([(*path, String::new())]);
// Write the generated metadata to the manifest
- let workspace_metadata =
- WorkspaceMetadata::new(splicing_manifest, extra_manifests_manifest, installations)?;
+ let workspace_metadata = WorkspaceMetadata::new(splicing_manifest, member_manifests)?;
workspace_metadata.inject_into(&mut manifest)?;
// Write the root manifest
@@ -284,37 +282,22 @@
Ok(SplicedManifest::Package(root_manifest_path))
}
+ /// Implementation for splicing together multiple Cargo packages/workspaces
fn splice_multi_package(
workspace_dir: &Path,
manifests: &&HashMap<PathBuf, Manifest>,
splicing_manifest: &&SplicingManifest,
- extra_manifests_manifest: &&ExtraManifestsManifest,
) -> Result<SplicedManifest> {
let mut manifest = default_cargo_workspace_manifest(&splicing_manifest.resolver_version);
// Optionally install a cargo config file into the workspace root.
Self::setup_cargo_config(&splicing_manifest.cargo_config, workspace_dir)?;
- let extra_workspace_manifests =
- Self::get_extra_workspace_manifests(&extra_manifests_manifest.manifests)?;
-
- let manifests: HashMap<PathBuf, Manifest> = manifests
- .iter()
- .map(|(p, m)| (p.to_owned(), m.to_owned()))
- .collect();
-
- let all_manifests = manifests
- .iter()
- .chain(extra_workspace_manifests.iter())
- .map(|(k, v)| (k.clone(), v.clone()))
- .collect();
-
let installations =
- Self::inject_workspace_members(&mut manifest, &all_manifests, workspace_dir)?;
+ Self::inject_workspace_members(&mut manifest, manifests, workspace_dir)?;
// Write the generated metadata to the manifest
- let workspace_metadata =
- WorkspaceMetadata::new(splicing_manifest, extra_manifests_manifest, installations)?;
+ let workspace_metadata = WorkspaceMetadata::new(splicing_manifest, installations)?;
workspace_metadata.inject_into(&mut manifest)?;
// Add any additional dependencies to the root package
@@ -327,38 +310,9 @@
Ok(SplicedManifest::MultiPackage(root_manifest_path))
}
- /// Extract the set of extra workspace member manifests such that it matches
- /// how other manifests are passed when creating a new [SplicerKind].
- fn get_extra_workspace_manifests(
- extra_manifests: &[ExtraManifestInfo],
- ) -> Result<HashMap<PathBuf, Manifest>> {
- extra_manifests
- .iter()
- .map(|config| match read_manifest(&config.manifest) {
- Ok(manifest) => Ok((config.manifest.clone(), manifest)),
- Err(err) => Err(err),
- })
- .collect()
- }
-
/// A helper for installing Cargo config files into the spliced workspace while also
/// ensuring no other linked config file is available
fn setup_cargo_config(cargo_config_path: &Option<PathBuf>, workspace_dir: &Path) -> Result<()> {
- // Make sure no other config files exist
- for config in vec![
- workspace_dir.join("config"),
- workspace_dir.join("config.toml"),
- ] {
- if config.exists() {
- fs::remove_file(&config).with_context(|| {
- format!(
- "Failed to delete existing cargo config: {}",
- config.display()
- )
- })?;
- }
- }
-
// If the `.cargo` dir is a symlink, we'll need to relink it and ensure
// a Cargo config file is omitted
let dot_cargo_dir = workspace_dir.join(".cargo");
@@ -383,12 +337,53 @@
dot_cargo_dir.join("config.toml"),
] {
if config.exists() {
- fs::remove_file(&config)?;
+ remove_symlink(&config).with_context(|| {
+ format!(
+ "Failed to delete existing cargo config: {}",
+ config.display()
+ )
+ })?;
}
}
}
}
+ // Make sure no other config files exist
+ for config in vec![
+ workspace_dir.join("config"),
+ workspace_dir.join("config.toml"),
+ dot_cargo_dir.join("config"),
+ dot_cargo_dir.join("config.toml"),
+ ] {
+ if config.exists() {
+ remove_symlink(&config).with_context(|| {
+ format!(
+ "Failed to delete existing cargo config: {}",
+ config.display()
+ )
+ })?;
+ }
+ }
+
+ // Ensure no parent directory also has a cargo config
+ let mut current_parent = workspace_dir.parent();
+ while let Some(parent) = current_parent {
+ let dot_cargo_dir = parent.join(".cargo");
+ for config in vec![
+ dot_cargo_dir.join("config.toml"),
+ dot_cargo_dir.join("config"),
+ ] {
+ if config.exists() {
+ bail!(
+ "A Cargo config file was found in a parent directory to the current workspace. This is not allowed because these settings will leak into your Bazel build but will not be reproducible on other machines.\nWorkspace = {}\nCargo config = {}",
+ workspace_dir.display(),
+ config.display(),
+ )
+ }
+ }
+ current_parent = parent.parent()
+ }
+
// Install the new config file after having removed all others
if let Some(cargo_config_path) = cargo_config_path {
if !dot_cargo_dir.exists() {
@@ -480,15 +475,10 @@
workspace_dir: PathBuf,
manifests: HashMap<PathBuf, Manifest>,
splicing_manifest: SplicingManifest,
- extra_manifests_manifest: ExtraManifestsManifest,
}
impl Splicer {
- pub fn new(
- workspace_dir: PathBuf,
- splicing_manifest: SplicingManifest,
- extra_manifests_manifest: ExtraManifestsManifest,
- ) -> Result<Self> {
+ pub fn new(workspace_dir: PathBuf, splicing_manifest: SplicingManifest) -> Result<Self> {
// Load all manifests
let manifests = splicing_manifest
.manifests
@@ -504,18 +494,13 @@
workspace_dir,
manifests,
splicing_manifest,
- extra_manifests_manifest,
})
}
/// Build a new workspace root
- pub fn splice_workspace(&self) -> Result<SplicedManifest> {
- SplicerKind::new(
- &self.manifests,
- &self.splicing_manifest,
- &self.extra_manifests_manifest,
- )?
- .splice(&self.workspace_dir)
+ pub fn splice_workspace(&self, cargo: &Path) -> Result<SplicedManifest> {
+ SplicerKind::new(&self.manifests, &self.splicing_manifest, cargo)?
+ .splice(&self.workspace_dir)
}
}
@@ -719,8 +704,8 @@
use std::str::FromStr;
use cargo_metadata::{MetadataCommand, PackageId};
+ use maplit::btreeset;
- use crate::splicing::ExtraManifestInfo;
use crate::utils::starlark::Label;
/// Clone and compare two items after calling `.sort()` on them.
@@ -750,6 +735,10 @@
(PathBuf::from("cargo"), PathBuf::from("rustc"))
}
+ fn cargo() -> PathBuf {
+ get_cargo_and_rustc_paths().0
+ }
+
fn generate_metadata(manifest_path: &Path) -> cargo_metadata::Metadata {
let manifest_dir = manifest_path.parent().unwrap_or_else(|| {
panic!(
@@ -790,16 +779,28 @@
}
fn mock_cargo_toml(path: &Path, name: &str) -> cargo_toml::Manifest {
+ mock_cargo_toml_with_dependencies(path, name, &[])
+ }
+
+ fn mock_cargo_toml_with_dependencies(
+ path: &Path,
+ name: &str,
+ deps: &[&str],
+ ) -> cargo_toml::Manifest {
let manifest = cargo_toml::Manifest::from_str(&textwrap::dedent(&format!(
r#"
[package]
- name = "{}"
+ name = "{name}"
version = "0.0.1"
[lib]
path = "lib.rs"
+
+ [dependencies]
+ {dependencies}
"#,
- name
+ name = name,
+ dependencies = deps.join("\n")
)))
.unwrap();
@@ -809,23 +810,6 @@
manifest
}
- fn mock_extra_manifest_digest(cache_dir: &Path) -> ExtraManifestsManifest {
- ExtraManifestsManifest {
- manifests: vec![{
- let manifest_path = cache_dir.join("extra_pkg").join("Cargo.toml");
- mock_cargo_toml(&manifest_path, "extra_pkg");
-
- ExtraManifestInfo {
- manifest: manifest_path,
- url: "https://crates.io/".to_owned(),
- sha256: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
- .to_owned(),
- }
- }],
- }
- }
-
- /// This json object is tightly coupled to [mock_extra_manifest_digest]
fn mock_workspace_metadata(
include_extra_member: bool,
workspace_prefix: Option<&str>,
@@ -870,7 +854,12 @@
.join("root_pkg")
.join(pkg)
.join("Cargo.toml");
- mock_cargo_toml(&manifest_path, pkg);
+ let deps = if pkg == &"sub_pkg_b" {
+ vec![r#"sub_pkg_a = { path = "../sub_pkg_a" }"#]
+ } else {
+ vec![]
+ };
+ mock_cargo_toml_with_dependencies(&manifest_path, pkg, &deps);
splicing_manifest.manifests.insert(
manifest_path,
@@ -903,6 +892,9 @@
let manifest_path = root_pkg.join("Cargo.toml");
fs::create_dir_all(&manifest_path.parent().unwrap()).unwrap();
fs::write(&manifest_path, toml::to_string(&manifest).unwrap()).unwrap();
+ {
+ File::create(root_pkg.join("BUILD.bazel")).unwrap();
+ }
let sub_pkg_a = root_pkg.join("sub_pkg_a");
let sub_pkg_b = root_pkg.join("sub_pkg_b");
@@ -916,7 +908,7 @@
splicing_manifest.manifests.insert(
manifest_path,
- Label::from_str("//pkg_root:Cargo.toml").unwrap(),
+ Label::from_str("//root_pkg:Cargo.toml").unwrap(),
);
(splicing_manifest, cache_dir)
@@ -1015,7 +1007,7 @@
// On windows, make sure we normalize the path to match what Cargo would
// otherwise use to populate metadata.
if cfg!(target_os = "windows") {
- workspace_root = format!("/{}", workspace_root.replace("\\", "/"))
+ workspace_root = format!("/{}", workspace_root.replace('\\', "/"))
};
if is_root {
@@ -1035,14 +1027,11 @@
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- ExtraManifestsManifest::default(),
- )
- .unwrap()
- .splice_workspace()
- .unwrap();
+ let workspace_manifest =
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo())
+ .unwrap();
// Ensure metadata is valid
let metadata = generate_metadata(workspace_manifest.as_path_buf());
@@ -1071,14 +1060,11 @@
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- ExtraManifestsManifest::default(),
- )
- .unwrap()
- .splice_workspace()
- .unwrap();
+ let workspace_manifest =
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo())
+ .unwrap();
// Ensure metadata is valid
let metadata = generate_metadata(workspace_manifest.as_path_buf());
@@ -1108,18 +1094,15 @@
// Remove everything but the root manifest
splicing_manifest
.manifests
- .retain(|_, label| *label == Label::from_str("//pkg_root:Cargo.toml").unwrap());
+ .retain(|_, label| *label == Label::from_str("//root_pkg:Cargo.toml").unwrap());
assert_eq!(splicing_manifest.manifests.len(), 1);
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- ExtraManifestsManifest::default(),
- )
- .unwrap()
- .splice_workspace();
+ let workspace_manifest =
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo());
assert!(workspace_manifest.is_err());
@@ -1133,6 +1116,33 @@
}
#[test]
+ fn splice_workspace_report_missing_root() {
+ let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace();
+
+ // Remove everything but the root manifest
+ splicing_manifest
+ .manifests
+ .retain(|_, label| *label != Label::from_str("//root_pkg:Cargo.toml").unwrap());
+ assert_eq!(splicing_manifest.manifests.len(), 2);
+
+ // Splice the workspace
+ let workspace_root = tempfile::tempdir().unwrap();
+ let workspace_manifest =
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo());
+
+ assert!(workspace_manifest.is_err());
+
+ // Ensure the error message identifies the missing root workspace manifest and its label
+ let err_str = format!("{:?}", &workspace_manifest);
+ assert!(
+ err_str.contains("Missing root workspace manifest")
+ && err_str.contains("//root_pkg:Cargo.toml")
+ );
+ }
+
+ #[test]
fn splice_workspace_report_external_workspace_members() {
let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace();
@@ -1168,13 +1178,10 @@
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- ExtraManifestsManifest::default(),
- )
- .unwrap()
- .splice_workspace();
+ let workspace_manifest =
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo());
assert!(workspace_manifest.is_err());
@@ -1194,14 +1201,11 @@
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- ExtraManifestsManifest::default(),
- )
- .unwrap()
- .splice_workspace()
- .unwrap();
+ let workspace_manifest =
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo())
+ .unwrap();
// Ensure metadata is valid
let metadata = generate_metadata(workspace_manifest.as_path_buf());
@@ -1226,14 +1230,11 @@
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- ExtraManifestsManifest::default(),
- )
- .unwrap()
- .splice_workspace()
- .unwrap();
+ let workspace_manifest =
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo())
+ .unwrap();
// Check the default resolver version
let cargo_manifest = cargo_toml::Manifest::from_str(
@@ -1278,14 +1279,11 @@
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- ExtraManifestsManifest::default(),
- )
- .unwrap()
- .splice_workspace()
- .unwrap();
+ let workspace_manifest =
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo())
+ .unwrap();
// Check the specified resolver version
let cargo_manifest = cargo_toml::Manifest::from_str(
@@ -1322,122 +1320,292 @@
}
#[test]
- fn extra_workspace_member_with_package() {
- let (splicing_manifest, cache_dir) = mock_splicing_manifest_with_package();
+ fn cargo_config_setup() {
+ let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace_in_root();
- // Add the extra workspace member
- let extra_manifests_manifest = mock_extra_manifest_digest(cache_dir.as_ref());
+ // Write a cargo config
+ let temp_dir = tempfile::tempdir().unwrap();
+ let external_config = temp_dir.as_ref().join("config.toml");
+ fs::write(&external_config, "# Cargo configuration file").unwrap();
+ splicing_manifest.cargo_config = Some(external_config);
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- extra_manifests_manifest,
- )
- .unwrap()
- .splice_workspace()
- .unwrap();
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo())
+ .unwrap();
- // Ensure metadata is valid
- let metadata = generate_metadata(workspace_manifest.as_path_buf());
- assert_sort_eq!(
- metadata.workspace_members,
- vec![
- new_package_id("extra_pkg", workspace_root.as_ref(), false),
- new_package_id("root_pkg", workspace_root.as_ref(), true),
- ]
- );
-
- // Ensure the workspace metadata annotations are populated
+ let cargo_config = workspace_root.as_ref().join(".cargo").join("config.toml");
+ assert!(cargo_config.exists());
assert_eq!(
- metadata.workspace_metadata,
- mock_workspace_metadata(true, None)
+ fs::read_to_string(cargo_config).unwrap().trim(),
+ "# Cargo configuration file"
);
-
- // Ensure lockfile was successfully spliced
- cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
}
#[test]
- fn extra_workspace_member_with_workspace() {
- let (splicing_manifest, cache_dir) = mock_splicing_manifest_with_workspace();
+ fn unregistered_cargo_config_replaced() {
+ let (mut splicing_manifest, cache_dir) = mock_splicing_manifest_with_workspace_in_root();
- // Add the extra workspace member
- let extra_manifests_manifest = mock_extra_manifest_digest(cache_dir.as_ref());
+ // Generate a cargo config that is not tracked by the splicing manifest
+ fs::create_dir_all(cache_dir.as_ref().join(".cargo")).unwrap();
+ fs::write(
+ cache_dir.as_ref().join(".cargo").join("config.toml"),
+ "# Untracked Cargo configuration file",
+ )
+ .unwrap();
+
+ // Write a cargo config
+ let temp_dir = tempfile::tempdir().unwrap();
+ let external_config = temp_dir.as_ref().join("config.toml");
+ fs::write(&external_config, "# Cargo configuration file").unwrap();
+ splicing_manifest.cargo_config = Some(external_config);
// Splice the workspace
let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- extra_manifests_manifest,
- )
- .unwrap()
- .splice_workspace()
- .unwrap();
+ Splicer::new(workspace_root.as_ref().to_path_buf(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo())
+ .unwrap();
- // Ensure metadata is valid
- let metadata = generate_metadata(workspace_manifest.as_path_buf());
- assert_sort_eq!(
- metadata.workspace_members,
- vec![
- new_package_id("sub_pkg_a", workspace_root.as_ref(), false),
- new_package_id("sub_pkg_b", workspace_root.as_ref(), false),
- new_package_id("extra_pkg", workspace_root.as_ref(), false),
- new_package_id("root_pkg", workspace_root.as_ref(), true),
- ]
- );
-
- // Ensure the workspace metadata annotations are populated
+ let cargo_config = workspace_root.as_ref().join(".cargo").join("config.toml");
+ assert!(cargo_config.exists());
assert_eq!(
- metadata.workspace_metadata,
- mock_workspace_metadata(true, Some("pkg_root"))
+ fs::read_to_string(cargo_config).unwrap().trim(),
+ "# Cargo configuration file"
);
-
- // Ensure lockfile was successfully spliced
- cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
}
#[test]
- fn extra_workspace_member_with_multi_package() {
- let (splicing_manifest, cache_dir) = mock_splicing_manifest_with_multi_package();
+ fn error_on_cargo_config_in_parent() {
+ let (mut splicing_manifest, _cache_dir) = mock_splicing_manifest_with_workspace_in_root();
- // Add the extra workspace member
- let extra_manifests_manifest = mock_extra_manifest_digest(cache_dir.as_ref());
+ // Write a cargo config
+ let temp_dir = tempfile::tempdir().unwrap();
+ let dot_cargo_dir = temp_dir.as_ref().join(".cargo");
+ fs::create_dir_all(&dot_cargo_dir).unwrap();
+ let external_config = dot_cargo_dir.join("config.toml");
+ fs::write(&external_config, "# Cargo configuration file").unwrap();
+ splicing_manifest.cargo_config = Some(external_config.clone());
// Splice the workspace
- let workspace_root = tempfile::tempdir().unwrap();
- let workspace_manifest = Splicer::new(
- workspace_root.as_ref().to_path_buf(),
- splicing_manifest,
- extra_manifests_manifest,
- )
- .unwrap()
- .splice_workspace()
- .unwrap();
+ let workspace_root = temp_dir.as_ref().join("workspace_root");
+ let splicing_result = Splicer::new(workspace_root.clone(), splicing_manifest)
+ .unwrap()
+ .splice_workspace(&cargo());
- // Ensure metadata is valid
- let metadata = generate_metadata(workspace_manifest.as_path_buf());
- assert_sort_eq!(
- metadata.workspace_members,
- vec![
- new_package_id("pkg_a", workspace_root.as_ref(), false),
- new_package_id("pkg_b", workspace_root.as_ref(), false),
- new_package_id("pkg_c", workspace_root.as_ref(), false),
- new_package_id("extra_pkg", workspace_root.as_ref(), false),
- // Multi package renderings always add a root package
- new_package_id("direct-cargo-bazel-deps", workspace_root.as_ref(), true),
+ // Ensure cargo config files in parent directories lead to errors
+ assert!(splicing_result.is_err());
+ let err_str = splicing_result.err().unwrap().to_string();
+ assert!(err_str.starts_with("A Cargo config file was found in a parent directory"));
+ assert!(err_str.contains(&format!("Workspace = {}", workspace_root.display())));
+ assert!(err_str.contains(&format!("Cargo config = {}", external_config.display())));
+ }
+
+ #[test]
+ fn find_missing_manifests_correct_without_root() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let root_manifest_dir = temp_dir.path();
+ touch(&root_manifest_dir.join("WORKSPACE.bazel"));
+ touch(&root_manifest_dir.join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("Cargo.toml"));
+ touch(&root_manifest_dir.join("foo").join("Cargo.toml"));
+ touch(&root_manifest_dir.join("bar").join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("bar").join("Cargo.toml"));
+
+ let known_manifest_paths = btreeset![
+ root_manifest_dir
+ .join("foo")
+ .join("Cargo.toml")
+ .normalize()
+ .unwrap(),
+ root_manifest_dir
+ .join("bar")
+ .join("Cargo.toml")
+ .normalize()
+ .unwrap(),
+ ];
+
+ let root_manifest: cargo_toml::Manifest = toml::toml! {
+ [workspace]
+ members = [
+ "foo",
+ "bar",
]
- );
+ [package]
+ name = "root_pkg"
+ version = "0.0.1"
- // Ensure the workspace metadata annotations are populated
+ [lib]
+ path = "lib.rs"
+ }
+ .try_into()
+ .unwrap();
+ let missing_manifests = SplicerKind::find_missing_manifests(
+ &root_manifest,
+ root_manifest_dir,
+ &known_manifest_paths,
+ )
+ .unwrap();
+ assert_eq!(missing_manifests, btreeset![]);
+ }
+
+ #[test]
+ fn find_missing_manifests_correct_with_root() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let root_manifest_dir = temp_dir.path();
+ touch(&root_manifest_dir.join("WORKSPACE.bazel"));
+ touch(&root_manifest_dir.join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("Cargo.toml"));
+ touch(&root_manifest_dir.join("foo").join("Cargo.toml"));
+ touch(&root_manifest_dir.join("bar").join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("bar").join("Cargo.toml"));
+
+ let known_manifest_paths = btreeset![
+ root_manifest_dir.join("Cargo.toml").normalize().unwrap(),
+ root_manifest_dir
+ .join("foo")
+ .join("Cargo.toml")
+ .normalize()
+ .unwrap(),
+ root_manifest_dir
+ .join("bar")
+ .join("Cargo.toml")
+ .normalize()
+ .unwrap(),
+ ];
+
+ let root_manifest: cargo_toml::Manifest = toml::toml! {
+ [workspace]
+ members = [
+ ".",
+ "foo",
+ "bar",
+ ]
+ [package]
+ name = "root_pkg"
+ version = "0.0.1"
+
+ [lib]
+ path = "lib.rs"
+ }
+ .try_into()
+ .unwrap();
+ let missing_manifests = SplicerKind::find_missing_manifests(
+ &root_manifest,
+ root_manifest_dir,
+ &known_manifest_paths,
+ )
+ .unwrap();
+ assert_eq!(missing_manifests, btreeset![]);
+ }
+
+ #[test]
+ fn find_missing_manifests_missing_root() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let root_manifest_dir = temp_dir.path();
+ touch(&root_manifest_dir.join("WORKSPACE.bazel"));
+ touch(&root_manifest_dir.join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("Cargo.toml"));
+ touch(&root_manifest_dir.join("foo").join("Cargo.toml"));
+ touch(&root_manifest_dir.join("bar").join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("bar").join("Cargo.toml"));
+
+ let known_manifest_paths = btreeset![
+ root_manifest_dir
+ .join("foo")
+ .join("Cargo.toml")
+ .normalize()
+ .unwrap(),
+ root_manifest_dir
+ .join("bar")
+ .join("Cargo.toml")
+ .normalize()
+ .unwrap(),
+ ];
+
+ let root_manifest: cargo_toml::Manifest = toml::toml! {
+ [workspace]
+ members = [
+ ".",
+ "foo",
+ "bar",
+ ]
+ [package]
+ name = "root_pkg"
+ version = "0.0.1"
+
+ [lib]
+ path = "lib.rs"
+ }
+ .try_into()
+ .unwrap();
+ let missing_manifests = SplicerKind::find_missing_manifests(
+ &root_manifest,
+ root_manifest_dir,
+ &known_manifest_paths,
+ )
+ .unwrap();
+ assert_eq!(missing_manifests, btreeset![String::from("//:Cargo.toml")]);
+ }
+
+ #[test]
+ fn find_missing_manifests_missing_nonroot() {
+ let temp_dir = tempfile::tempdir().unwrap();
+ let root_manifest_dir = temp_dir.path();
+ touch(&root_manifest_dir.join("WORKSPACE.bazel"));
+ touch(&root_manifest_dir.join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("Cargo.toml"));
+ touch(&root_manifest_dir.join("foo").join("Cargo.toml"));
+ touch(&root_manifest_dir.join("bar").join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("bar").join("Cargo.toml"));
+ touch(&root_manifest_dir.join("baz").join("BUILD.bazel"));
+ touch(&root_manifest_dir.join("baz").join("Cargo.toml"));
+
+ let known_manifest_paths = btreeset![
+ root_manifest_dir
+ .join("foo")
+ .join("Cargo.toml")
+ .normalize()
+ .unwrap(),
+ root_manifest_dir
+ .join("bar")
+ .join("Cargo.toml")
+ .normalize()
+ .unwrap(),
+ ];
+
+ let root_manifest: cargo_toml::Manifest = toml::toml! {
+ [workspace]
+ members = [
+ "foo",
+ "bar",
+ "baz",
+ ]
+ [package]
+ name = "root_pkg"
+ version = "0.0.1"
+
+ [lib]
+ path = "lib.rs"
+ }
+ .try_into()
+ .unwrap();
+ let missing_manifests = SplicerKind::find_missing_manifests(
+ &root_manifest,
+ root_manifest_dir,
+ &known_manifest_paths,
+ )
+ .unwrap();
assert_eq!(
- metadata.workspace_metadata,
- mock_workspace_metadata(true, None)
+ missing_manifests,
+ btreeset![String::from("//baz:Cargo.toml")]
);
+ }
- // Ensure lockfile was successfully spliced
- cargo_lock::Lockfile::load(workspace_root.as_ref().join("Cargo.lock")).unwrap();
+ fn touch(path: &Path) {
+ std::fs::create_dir_all(path.parent().unwrap()).unwrap();
+ std::fs::write(path, &[]).unwrap();
}
}
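
A minimal usage sketch of the reworked `Splicer` entry points used throughout the updated
tests above; `splice_example` and its paths are illustrative placeholders:

    use std::path::{Path, PathBuf};

    use anyhow::Result;

    use crate::splicing::{SplicedManifest, Splicer, SplicingManifest};

    // Illustrative only: the `ExtraManifestsManifest` argument is gone, and
    // `splice_workspace` now takes the cargo binary so splicing can shell out to
    // `cargo metadata` when diagnosing a missing workspace root.
    fn splice_example(
        splicing_manifest: SplicingManifest,
        cargo_bin: &Path,
    ) -> Result<SplicedManifest> {
        let workspace_dir = PathBuf::from("/tmp/splicing-workspace");
        Splicer::new(workspace_dir, splicing_manifest)?.splice_workspace(cargo_bin)
    }
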
diff --git a/crate_universe/src/utils/starlark/label.rs b/crate_universe/src/utils/starlark/label.rs
index 1716944..a21c81e 100644
--- a/crate_universe/src/utils/starlark/label.rs
+++ b/crate_universe/src/utils/starlark/label.rs
@@ -7,6 +7,8 @@
use serde::de::Visitor;
use serde::{Deserialize, Serialize, Serializer};
+// Note that this type assumes there's no such thing as a relative label;
+// `:foo` is assumed to be relative to the repository root and parses as equivalent to `//:foo`.
#[derive(Debug, Default, PartialEq, Eq, PartialOrd, Ord, Clone)]
pub struct Label {
pub repository: Option<String>,
@@ -52,19 +54,21 @@
impl Display for Label {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- let mut label = String::new();
-
// Add the repository
if let Some(repo) = &self.repository {
- label = format!("@{}", repo);
+ write!(f, "@{}", repo)?;
}
+ write!(f, "//")?;
+
// Add the package
if let Some(pkg) = &self.package {
- label = format!("{}//{}", label, pkg);
+ write!(f, "{}", pkg)?;
}
- write!(f, "{}:{}", &label, &self.target,)
+ write!(f, ":{}", self.target)?;
+
+ Ok(())
}
}
@@ -190,6 +194,7 @@
#[test]
fn full_label() {
let label = Label::from_str("@repo//package/sub_package:target").unwrap();
+ assert_eq!(label.to_string(), "@repo//package/sub_package:target");
assert_eq!(label.repository.unwrap(), "repo");
assert_eq!(label.package.unwrap(), "package/sub_package");
assert_eq!(label.target, "target");
@@ -198,6 +203,7 @@
#[test]
fn no_repository() {
let label = Label::from_str("//package:target").unwrap();
+ assert_eq!(label.to_string(), "//package:target");
assert_eq!(label.repository, None);
assert_eq!(label.package.unwrap(), "package");
assert_eq!(label.target, "target");
@@ -206,6 +212,7 @@
#[test]
fn no_slashes() {
let label = Label::from_str("package:target").unwrap();
+ assert_eq!(label.to_string(), "//package:target");
assert_eq!(label.repository, None);
assert_eq!(label.package.unwrap(), "package");
assert_eq!(label.target, "target");
@@ -214,6 +221,7 @@
#[test]
fn root_label() {
let label = Label::from_str("@repo//:target").unwrap();
+ assert_eq!(label.to_string(), "@repo//:target");
assert_eq!(label.repository.unwrap(), "repo");
assert_eq!(label.package, None);
assert_eq!(label.target, "target");
@@ -222,6 +230,7 @@
#[test]
fn root_label_no_repository() {
let label = Label::from_str("//:target").unwrap();
+ assert_eq!(label.to_string(), "//:target");
assert_eq!(label.repository, None);
assert_eq!(label.package, None);
assert_eq!(label.target, "target");
@@ -230,6 +239,7 @@
#[test]
fn root_label_no_slashes() {
let label = Label::from_str(":target").unwrap();
+ assert_eq!(label.to_string(), "//:target");
assert_eq!(label.repository, None);
assert_eq!(label.package, None);
assert_eq!(label.target, "target");
@@ -238,6 +248,10 @@
#[test]
fn full_label_with_slash_after_colon() {
let label = Label::from_str("@repo//package/sub_package:subdir/target").unwrap();
+ assert_eq!(
+ label.to_string(),
+ "@repo//package/sub_package:subdir/target"
+ );
assert_eq!(label.repository.unwrap(), "repo");
assert_eq!(label.package.unwrap(), "package/sub_package");
assert_eq!(label.target, "subdir/target");
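
A small round-trip sketch of the `Label` behaviour pinned down by the assertions added
above; `label_round_trip` is illustrative and only restates what the tests check:

    use std::str::FromStr;

    use crate::utils::starlark::Label;

    fn label_round_trip() {
        // A label with no leading `//` is treated as relative to the repository root.
        let label = Label::from_str(":target").unwrap();
        assert_eq!(label.to_string(), "//:target");

        // A fully qualified label renders back exactly as written.
        let label = Label::from_str("@repo//package/sub_package:subdir/target").unwrap();
        assert_eq!(
            label.to_string(),
            "@repo//package/sub_package:subdir/target"
        );
    }
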