Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Middle-ground vendoring option using a local registry #46

Open
wants to merge 9 commits into
base: main
Choose a base branch
from
38 changes: 38 additions & 0 deletions src/buck.rs
Original file line number Diff line number Diff line change
Expand Up @@ -448,6 +448,38 @@ impl Serialize for HttpArchive {
}
}

/// Buck rule that unpacks a crate's `.crate` tarball stored on disk in a
/// local registry directory, as an alternative to fetching it over the
/// network with `http_archive` (used in `vendor = "local-registry"` mode).
#[derive(Debug)]
pub struct ExtractArchive {
    // Target name, e.g. `foo-1.0.0.crate`.
    pub name: Name,
    // Path to the archive on disk, e.g. `vendor/foo-1.0.0.crate`.
    pub src: BuckPath,
    // Leading directory inside the tarball to strip, e.g. `foo-1.0.0`.
    pub strip_prefix: String,
    // Sub-targets requested by fixups; left empty at construction and
    // filled in later, once all fixups have been computed.
    pub sub_targets: BTreeSet<BuckPath>,
    pub visibility: Visibility,
    // Used only to order rules in the generated file; not serialized.
    pub sort_key: Name,
}

impl Serialize for ExtractArchive {
    /// Serialize the rule attributes as a map in a fixed order. `sort_key`
    /// is deliberately omitted (it only controls rule ordering in the
    /// generated file), and `sub_targets` is written only when non-empty.
    fn serialize<S: Serializer>(&self, ser: S) -> Result<S::Ok, S::Error> {
        let mut map = ser.serialize_map(None)?;
        map.serialize_entry("name", &self.name)?;
        map.serialize_entry("src", &self.src)?;
        map.serialize_entry("strip_prefix", &self.strip_prefix)?;
        if !self.sub_targets.is_empty() {
            map.serialize_entry("sub_targets", &self.sub_targets)?;
        }
        map.serialize_entry("visibility", &self.visibility)?;
        map.end()
    }
}

#[derive(Debug)]
pub struct GitFetch {
pub name: Name,
Expand Down Expand Up @@ -1025,6 +1057,7 @@ impl Serialize for PrebuiltCxxLibrary {
pub enum Rule {
Alias(Alias),
Filegroup(Filegroup),
ExtractArchive(ExtractArchive),
HttpArchive(HttpArchive),
GitFetch(GitFetch),
Binary(RustBinary),
Expand Down Expand Up @@ -1066,6 +1099,7 @@ fn rule_sort_key(rule: &Rule) -> impl Ord + '_ {
// Make the alias rule come before the actual rule. Note that aliases
// emitted by reindeer are always to a target within the same package.
Rule::Alias(Alias { actual, .. }) => RuleSortKey::Other(actual, 0),
Rule::ExtractArchive(ExtractArchive { sort_key, .. }) => RuleSortKey::Other(sort_key, 1),
Rule::HttpArchive(HttpArchive { sort_key, .. }) => RuleSortKey::Other(sort_key, 1),
Rule::GitFetch(GitFetch { name, .. }) => RuleSortKey::GitFetch(name),
Rule::Filegroup(_)
Expand All @@ -1091,6 +1125,7 @@ impl Rule {
Rule::Alias(Alias { name, .. })
| Rule::Filegroup(Filegroup { name, .. })
| Rule::HttpArchive(HttpArchive { name, .. })
| Rule::ExtractArchive(ExtractArchive { name, .. })
| Rule::GitFetch(GitFetch { name, .. })
| Rule::Binary(RustBinary {
common:
Expand Down Expand Up @@ -1143,6 +1178,9 @@ impl Rule {
Rule::Filegroup(filegroup) => {
FunctionCall::new(&config.filegroup, filegroup).serialize(Serializer)
}
Rule::ExtractArchive(compressed_crate) => {
FunctionCall::new(&config.extract_archive, compressed_crate).serialize(Serializer)
}
Rule::HttpArchive(http_archive) => {
FunctionCall::new(&config.http_archive, http_archive).serialize(Serializer)
}
Expand Down
79 changes: 67 additions & 12 deletions src/buckify.rs
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ use crate::buck;
use crate::buck::Alias;
use crate::buck::BuckPath;
use crate::buck::Common;
use crate::buck::ExtractArchive;
use crate::buck::Filegroup;
use crate::buck::GitFetch;
use crate::buck::HttpArchive;
Expand All @@ -56,6 +57,7 @@ use crate::cargo::Source;
use crate::cargo::TargetReq;
use crate::collection::SetOrMap;
use crate::config::Config;
use crate::config::VendorConfig;
use crate::fixups::ExportSources;
use crate::fixups::Fixups;
use crate::glob::Globs;
Expand Down Expand Up @@ -219,7 +221,7 @@ fn generate_rules<'scope>(
for rule in rules {
let _ = rule_tx.send(Ok(rule));
}
if context.config.vendor.is_none() {
if context.config.vendor.is_not_source() {
deps.push((pkg, TargetReq::Sources));
}
}
Expand Down Expand Up @@ -258,10 +260,22 @@ fn generate_nonvendored_sources_archive<'scope>(

match &lockfile_package.source {
Source::Local => Ok(None),
Source::CratesIo => generate_http_archive(context, pkg, lockfile_package).map(Some),
Source::CratesIo => match context.config.vendor {
VendorConfig::Off => generate_http_archive(context, pkg, lockfile_package).map(Some),
VendorConfig::LocalRegistry => {
generate_extract_archive(context, pkg, lockfile_package).map(Some)
}
VendorConfig::Source(_) => unreachable!(),
},
Source::Git {
repo, commit_hash, ..
} => generate_git_fetch(repo, commit_hash).map(Some),
} => match context.config.vendor {
VendorConfig::Off => generate_git_fetch(repo, commit_hash).map(Some),
VendorConfig::LocalRegistry => {
generate_extract_archive(context, pkg, lockfile_package).map(Some)
}
VendorConfig::Source(_) => unreachable!(),
},
Source::Unrecognized(_) => {
bail!(
"`vendor = false` mode is supported only with exclusively crates.io and https git dependencies. \"{}\" {} is coming from some other source",
Expand All @@ -272,6 +286,25 @@ fn generate_nonvendored_sources_archive<'scope>(
}
}

/// Build an `extract_archive` rule for a crate whose sources live as a
/// `.crate` tarball in the local registry directory (`vendor =
/// "local-registry"` mode).
///
/// The archive `foo-1.0.0.crate` unpacks into a `foo-1.0.0/` directory,
/// which is stripped so downstream rules see the crate root directly.
/// `sub_targets` starts empty and is filled in later, after all fixups
/// have been computed.
fn generate_extract_archive<'scope>(
    _context: &'scope RuleContext<'scope>,
    pkg: &'scope Manifest,
    _lockfile_package: &LockfilePackage,
) -> anyhow::Result<Rule> {
    let vendordir = "vendor";
    // `{name}-{version}` is the archive's internal prefix and the basis for
    // the target name, source path, and sort key — compute it once.
    let base = format!("{}-{}", pkg.name, pkg.version);
    Ok(Rule::ExtractArchive(ExtractArchive {
        name: Name(format!("{base}.crate")),
        src: BuckPath(PathBuf::from(format!("{vendordir}/{base}.crate"))),
        strip_prefix: base.clone(),
        sub_targets: BTreeSet::new(), // populated later after all fixups are constructed
        visibility: Visibility::Private,
        sort_key: Name(base),
    }))
}

fn generate_http_archive<'scope>(
context: &'scope RuleContext<'scope>,
pkg: &'scope Manifest,
Expand Down Expand Up @@ -414,8 +447,10 @@ fn generate_target_rules<'scope>(

let manifest_dir = pkg.manifest_dir();
let mapped_manifest_dir =
if context.config.vendor.is_some() || matches!(pkg.source, Source::Local) {
if context.config.vendor.is_source() || matches!(pkg.source, Source::Local) {
relative_path(&paths.third_party_dir, manifest_dir)
} else if let VendorConfig::LocalRegistry = context.config.vendor {
PathBuf::from(format!("{}-{}.crate", pkg.name, pkg.version))
} else if let Source::Git { repo, .. } = &pkg.source {
let git_fetch = short_name_for_git_repo(repo)?;
let repository_root = find_repository_root(manifest_dir)?;
Expand All @@ -428,7 +463,7 @@ fn generate_target_rules<'scope>(
let edition = tgt.edition.unwrap_or(pkg.edition);

let mut licenses = BTreeSet::new();
if config.vendor.is_none() {
if !config.vendor.is_source() {
// The `licenses` attribute takes `attrs.source()` which is the file
// containing the custom license text. For `vendor = false` mode, we
// don't have such a file on disk, and we don't have a Buck label either
Expand Down Expand Up @@ -458,7 +493,7 @@ fn generate_target_rules<'scope>(
// filename, or a list of globs.
// If we're configured to get precise sources and we're using 2018+ edition source, then
// parse the crate to see what files are actually used.
let mut srcs = if (config.vendor.is_some() || matches!(pkg.source, Source::Local))
let mut srcs = if (config.vendor.is_source() || matches!(pkg.source, Source::Local))
&& fixups.precise_srcs()
&& edition >= Edition::Rust2018
{
Expand Down Expand Up @@ -504,7 +539,7 @@ fn generate_target_rules<'scope>(
)
.context("rustc_flags")?;

if config.vendor.is_some() || matches!(pkg.source, Source::Local) {
if config.vendor.is_source() || matches!(pkg.source, Source::Local) {
unzip_platform(
config,
&mut base,
Expand All @@ -516,6 +551,10 @@ fn generate_target_rules<'scope>(
fixups.compute_srcs(srcs)?,
)
.context("srcs")?;
} else if let VendorConfig::LocalRegistry = config.vendor {
let http_archive_target = format!(":{}-{}.crate", pkg.name, pkg.version);
base.srcs
.insert(BuckPath(PathBuf::from(http_archive_target)));
} else if let Source::Git { repo, .. } = &pkg.source {
let short_name = short_name_for_git_repo(repo)?;
let git_fetch_target = format!(":{}.git", short_name);
Expand Down Expand Up @@ -870,7 +909,7 @@ fn generate_target_rules<'scope>(
// For non-disk sources (i.e. non-vendor mode git_fetch and
// http_archive), `srcs` and `exclude` are ignored because
// we can't look at the files to match globs.
let srcs = if config.vendor.is_some() || matches!(pkg.source, Source::Local) {
let srcs = if config.vendor.is_source() || matches!(pkg.source, Source::Local) {
// e.g. {"src/lib.rs": "vendor/foo-1.0.0/src/lib.rs"}
let mut globs = Globs::new(srcs, exclude).context("export sources")?;
let srcs = globs
Expand All @@ -884,6 +923,14 @@ fn generate_target_rules<'scope>(
globs.check_all_globs_used()?;
}
srcs
} else if let VendorConfig::LocalRegistry = config.vendor {
// e.g. {":foo-1.0.0.crate": "foo-1.0.0"}
let http_archive_target = format!(":{}-{}.crate", pkg.name, pkg.version);
[(
BuckPath(mapped_manifest_dir.clone()),
SubtargetOrPath::Path(BuckPath(PathBuf::from(http_archive_target))),
)]
.into()
} else if let Source::Git { repo, .. } = &pkg.source {
// e.g. {":foo-123.git": "foo-123"}
let short_name = short_name_for_git_repo(repo)?;
Expand Down Expand Up @@ -985,7 +1032,7 @@ fn buckify_for_universe(

// Fill in all http_archive rules with all the sub_targets which got
// mentioned by fixups.
if config.vendor.is_none() {
if !config.vendor.is_source() {
let mut need_subtargets = HashMap::<Name, BTreeSet<BuckPath>>::new();
let mut insert = |subtarget_or_path: &SubtargetOrPath| {
if let SubtargetOrPath::Subtarget(subtarget) = subtarget_or_path {
Expand Down Expand Up @@ -1026,10 +1073,18 @@ fn buckify_for_universe(
rules = rules
.into_iter()
.map(|mut rule| {
if let Rule::HttpArchive(rule) = &mut rule {
if let Some(need_subtargets) = need_subtargets.remove(&rule.name) {
rule.sub_targets = need_subtargets;
match &mut rule {
Rule::HttpArchive(rule) => {
if let Some(need_subtargets) = need_subtargets.remove(&rule.name) {
rule.sub_targets = need_subtargets;
}
}
Rule::ExtractArchive(rule) => {
if let Some(need_subtargets) = need_subtargets.remove(&rule.name) {
rule.sub_targets = need_subtargets;
}
}
_ => (),
}
rule
})
Expand Down
2 changes: 1 addition & 1 deletion src/cargo.rs
Original file line number Diff line number Diff line change
Expand Up @@ -61,7 +61,7 @@ pub fn cargo_get_lockfile_and_metadata(

let cargo_home;
let lockfile;
if config.vendor.is_none() {
if config.vendor.is_not_source() {
cargo_home = None;

// Whether or not there is a Cargo.lock already, do not read it yet.
Expand Down
62 changes: 48 additions & 14 deletions src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -77,11 +77,8 @@ pub struct Config {
#[serde(default)]
pub buck: BuckConfig,

#[serde(
default = "default_vendor_config",
deserialize_with = "deserialize_vendor_config"
)]
pub vendor: Option<VendorConfig>,
#[serde(default, deserialize_with = "deserialize_vendor_config")]
pub vendor: VendorConfig,

#[serde(default)]
pub audit: AuditConfig,
Expand Down Expand Up @@ -131,6 +128,8 @@ pub struct BuckConfig {
/// Rule name for http_archive
#[serde(default)]
pub http_archive: StringWithDefault<MustBe!("http_archive")>,
#[serde(default)]
pub extract_archive: StringWithDefault<MustBe!("extract_archive")>,
/// Rule name for git_fetch
#[serde(default)]
pub git_fetch: StringWithDefault<MustBe!("git_fetch")>,
Expand All @@ -153,9 +152,26 @@ pub struct BuckConfig {
pub buildscript_genrule: StringWithDefault<MustBe!("buildscript_run")>,
}

/// How third-party crate sources are made available to the build.
#[derive(Debug, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub enum VendorConfig {
    /// `vendor = false`: no local copy; sources are fetched at build time.
    Off,
    /// `vendor = "local-registry"`: `.crate` archives kept on disk and
    /// extracted by `extract_archive` rules.
    LocalRegistry,
    /// `vendor = true` or a `[vendor]` table: fully vendored source tree.
    Source(VendorSourceConfig),
}
impl VendorConfig {
    /// True only for full source vendoring.
    pub(crate) fn is_source(&self) -> bool {
        match self {
            Self::Source(_) => true,
            Self::Off | Self::LocalRegistry => false,
        }
    }

    /// Convenience negation of [`Self::is_source`].
    pub(crate) fn is_not_source(&self) -> bool {
        !matches!(self, Self::Source(_))
    }
}

#[derive(Debug, Default, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct VendorConfig {
pub struct VendorSourceConfig {
/// List of .gitignore files to use to filter checksum files, relative to
/// this config file.
#[serde(default)]
Expand All @@ -165,6 +181,12 @@ pub struct VendorConfig {
pub checksum_exclude: HashSet<String>,
}

impl Default for VendorConfig {
fn default() -> Self {
VendorConfig::Source(Default::default())
}
}

#[derive(Debug, Default, Clone, Deserialize)]
#[serde(deny_unknown_fields)]
pub struct AuditConfig {
Expand Down Expand Up @@ -247,10 +269,6 @@ impl<T> From<String> for StringWithDefault<T> {
}
}

fn default_vendor_config() -> Option<VendorConfig> {
Some(VendorConfig::default())
}

fn default_platforms() -> HashMap<PlatformName, PlatformConfig> {
const DEFAULT_PLATFORMS_TOML: &str = include_str!("default_platforms.toml");

Expand All @@ -270,14 +288,14 @@ fn default_universes() -> BTreeMap<UniverseName, UniverseConfig> {
map
}

fn deserialize_vendor_config<'de, D>(deserializer: D) -> Result<Option<VendorConfig>, D::Error>
fn deserialize_vendor_config<'de, D>(deserializer: D) -> Result<VendorConfig, D::Error>
where
D: Deserializer<'de>,
{
struct VendorConfigVisitor;

impl<'de> Visitor<'de> for VendorConfigVisitor {
type Value = Option<VendorConfig>;
type Value = VendorConfig;

fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
formatter.write_str("[vendor] section, or `vendor = false`")
Expand All @@ -289,14 +307,30 @@ where
{
// `vendor = true`: default configuration with vendoring.
// `vendor = false`: do not vendor.
Ok(value.then(VendorConfig::default))
Ok(if value {
VendorConfig::default()
} else {
VendorConfig::Off
})
}

fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: serde::de::Error,
{
if v == "local-registry" {
Ok(VendorConfig::LocalRegistry)
} else {
Err(E::custom("unknown vendor type"))
}
}

fn visit_map<M>(self, map: M) -> Result<Self::Value, M::Error>
where
M: MapAccess<'de>,
{
VendorConfig::deserialize(MapAccessDeserializer::new(map)).map(Some)
VendorSourceConfig::deserialize(MapAccessDeserializer::new(map))
.map(VendorConfig::Source)
}
}

Expand Down
Loading