use the "current" large repo version when forward syncing
Summary:
This diff gets us to the whole purpose of this stack. Previously, when forward-syncing a commit from the small to the large repo, we always looked at the version used for syncing its parent commit from the small repo and reused that one. This caused two problems:
* if the config version in the large repo had changed since then, we would always fail and manual intervention would be needed to recover from that situation. There are two things we were doing wrong that caused this error:
   * we were using the wrong version - if master is using a newer sync mapping version, that is the one that's "current" and should be used
   * we were rewriting on top of the wrong parent: the commit that the parent remaps to is only one of many commits in the large repo that are "working copy equivalent" to it. In fact, every commit on the common branch since the last sync is "equivalent", but only the ones coming after the version change are sound parent candidates if we want to sync with the new version
* if the last sync was a while ago - say there has been no push to the small repo for a week - such a push could be rejected, because there is a maximum pushrebase distance that we do not pushrebase over.

This diff fixes both problems by rewriting the synced small repo commit's parent to the latest position of the large repo target bookmark and using the version associated with it. It does so only if that bookmark commit has a version associated with it and is newer than (i.e. a descendant of) the commit the small repo commit's parent remaps to.

NOTE: For now, to minimize the blast radius of this diff, we artificially limit its application to the first bullet point.
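
To illustrate the selection logic described above, here is a minimal, self-contained sketch using hypothetical simplified types (plain integers and strings standing in for ChangesetId and CommitSyncConfigVersion). It is not the actual Mononoke API; the real implementation is get_version_and_parent_map_for_sync_via_pushrebase further down in this diff:

use std::collections::HashMap;

type CsId = u64;
type Version = &'static str;

// If the large repo bookmark head carries a newer mapping version and is a
// descendant of the commit the parent remaps to, rebase onto the head and adopt
// its version; otherwise keep the parent's version and override nothing.
fn pick_version_and_parent(
    parent_version: Version,
    parent_remaps_to: CsId,
    bookmark_head: CsId,
    bookmark_head_version: Option<Version>,
    is_ancestor: impl Fn(CsId, CsId) -> bool,
) -> (Version, HashMap<CsId, CsId>) {
    match bookmark_head_version {
        Some(head_version)
            if head_version != parent_version
                && is_ancestor(parent_remaps_to, bookmark_head) =>
        {
            (head_version, HashMap::from([(parent_remaps_to, bookmark_head)]))
        }
        _ => (parent_version, HashMap::new()),
    }
}

Returning an empty map keeps the old behaviour, since the downstream parent rewrite then leaves every parent untouched.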

Reviewed By: markbt, gustavoavena

Differential Revision: D54877757

fbshipit-source-id: ea69204449e1fc5e14fda8b1ee8a6270c2fd2c9a
mitrandir77 authored and facebook-github-bot committed Mar 17, 2024
1 parent a251ee0 commit 70865ba
Showing 7 changed files with 198 additions and 50 deletions.
@@ -303,6 +303,10 @@ where
/// for a (small repo -> large repo) pair.
///
/// Validation that the version is applicable is done by the caller.
///
/// The optional parent mapping allows overriding the parent selection when rewriting a small
/// repo commit to the large repo before pushrebasing it. Such a commit must be one of the
/// commits that are working-copy equivalent to the small repo commit.
pub async fn unsafe_sync_commit_pushrebase<'a>(
&'a self,
ctx: &'a CoreContext,
@@ -312,6 +316,7 @@ where
rewritedates: PushrebaseRewriteDates,
version: CommitSyncConfigVersion,
change_mapping_version: Option<CommitSyncConfigVersion>,
parent_mapping: HashMap<ChangesetId, ChangesetId>,
) -> Result<Option<ChangesetId>, Error> {
let source_cs_id = source_cs.get_changeset_id();
let before = Instant::now();
@@ -323,6 +328,7 @@ where
rewritedates,
version,
change_mapping_version,
parent_mapping,
)
.await;
let elapsed = before.elapsed();
@@ -882,6 +888,7 @@ where
rewritedates: PushrebaseRewriteDates,
version: CommitSyncConfigVersion,
change_mapping_version: Option<CommitSyncConfigVersion>,
parent_mapping: HashMap<ChangesetId, ChangesetId>,
) -> Result<Option<ChangesetId>, Error> {
let hash = source_cs.get_changeset_id();
let (source_repo, target_repo) = self.get_source_target();
@@ -924,6 +931,17 @@ where
let remapped_parents =
remap_parents(ctx, &source_cs_mut, self, parent_selection_hint).await?;

let remapped_parents = remapped_parents
.into_iter()
.map(|(source_parent, target_parent)| {
if let Some(new_target) = parent_mapping.get(&target_parent) {
(source_parent, *new_target)
} else {
(source_parent, target_parent)
}
})
.collect();

let small_repo = self.get_small_repo();
let x_repo_submodule_metadata_file_prefix =
get_x_repo_submodule_metadata_file_prefx_from_config(
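
For context, the parent override in the hunk above is a pure map transformation. Below is a self-contained sketch of that step together with a usage example, using plain integers as hypothetical stand-ins for ChangesetId (illustration only, not the production types):

use std::collections::HashMap;

// Substitute a remapped large repo parent whenever parent_mapping knows a more
// up-to-date equivalent for it; otherwise keep the original remapping.
fn apply_parent_override(
    remapped_parents: HashMap<u64, u64>,
    parent_mapping: &HashMap<u64, u64>,
) -> HashMap<u64, u64> {
    remapped_parents
        .into_iter()
        .map(|(source_parent, target_parent)| {
            match parent_mapping.get(&target_parent) {
                Some(new_target) => (source_parent, *new_target),
                None => (source_parent, target_parent),
            }
        })
        .collect()
}

fn main() {
    // Small repo parent 1 was previously synced to large repo commit 10, but the
    // target bookmark has since moved to commit 42 under a newer mapping version.
    let remapped = HashMap::from([(1u64, 10u64)]);
    let overrides = HashMap::from([(10u64, 42u64)]);
    assert_eq!(apply_parent_override(remapped, &overrides), HashMap::from([(1, 42)]));

    // With an empty override map (Default::default() at the test call sites below),
    // every parent is left untouched, preserving the old behaviour.
    let no_overrides: HashMap<u64, u64> = HashMap::new();
    assert_eq!(
        apply_parent_override(HashMap::from([(1, 10)]), &no_overrides),
        HashMap::from([(1, 10)])
    );
}
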
@@ -16,6 +16,7 @@ use anyhow::anyhow;
use anyhow::format_err;
use anyhow::Error;
use anyhow::Result;
use bookmarks::BookmarkKey;
use borrowed::borrowed;
use cacheblob::InProcessLease;
use cacheblob::LeaseOps;
@@ -192,6 +193,8 @@ pub(crate) async fn remap_parents<'a, M: SyncedCommitMapping + Clone + 'static,
pub struct SyncedAncestorsVersions {
// Versions of all synced ancestors
pub versions: HashSet<CommitSyncConfigVersion>,
// Rewritten ancestors, mapped to the config version they were synced with
pub rewritten_ancestors: HashMap<ChangesetId, CommitSyncConfigVersion>,
}

impl SyncedAncestorsVersions {
@@ -280,8 +283,11 @@ where
synced_ancestors_versions.versions.insert(version);
}
RewrittenAs(cs_ids_versions) => {
for (_, version) in cs_ids_versions {
synced_ancestors_versions.versions.insert(version);
for (cs_id, version) in cs_ids_versions {
synced_ancestors_versions.versions.insert(version.clone());
synced_ancestors_versions
.rewritten_ancestors
.insert(cs_id, version);
}
}
EquivalentWorkingCopyAncestor(_, version) => {
@@ -362,7 +368,7 @@ where
.await?;

// Get the config versions from all synced ancestors
let synced_ancestors_versions = stream::iter(&synced_ancestors_frontier)
let synced_ancestors_list = stream::iter(&synced_ancestors_frontier)
.then(|cs_id| {
borrowed!(ctx, commit_syncer);

@@ -375,11 +381,12 @@ where
Some(plural) => {
use PluralCommitSyncOutcome::*;
match plural {
NotSyncCandidate(version) => Ok(vec![version]),
RewrittenAs(cs_ids_versions) => {
Ok(cs_ids_versions.into_iter().map(|(_, v)| v).collect())
}
EquivalentWorkingCopyAncestor(_, version) => Ok(vec![version]),
NotSyncCandidate(version) => Ok(vec![(None, version)]),
RewrittenAs(cs_ids_versions) => Ok(cs_ids_versions
.into_iter()
.map(|(cs_id, v)| (Some(cs_id), v))
.collect()),
EquivalentWorkingCopyAncestor(_, version) => Ok(vec![(None, version)]),
}
}
None => Err(anyhow!("Failed to get config version from synced ancestor")),
@@ -390,7 +397,17 @@ where
.await?
.into_iter()
.flatten()
.collect::<HashSet<_>>();
.collect::<Vec<_>>();

let synced_ancestors_versions = synced_ancestors_list
.iter()
.cloned()
.map(|(_, v)| v)
.collect();
let rewritten_ancestors = synced_ancestors_list
.into_iter()
.filter_map(|(maybe_cs_id, version)| maybe_cs_id.map(|cs_id| (cs_id, version)))
.collect();

// Get the oldest unsynced ancestors by getting the difference between the
// ancestors from the starting changeset and its synced ancestors.
@@ -405,10 +422,100 @@ where
commits_to_sync,
SyncedAncestorsVersions {
versions: synced_ancestors_versions,
rewritten_ancestors,
},
))
}

/// Finds the "current" version for the large repo (it may have been updated since the last
/// pushrebase), and returns that version together with a mapping from the synced ancestors to
/// more up-to-date changesets with an equivalent working copy.
///
/// This is written with the assumption that there are no diamond merges (which are not supported
/// by other parts of x_repo_sync) and that the small repo bookmark never moves backwards (which is
/// not supported by other pieces of the infra).
pub async fn get_version_and_parent_map_for_sync_via_pushrebase<
'a,
M: SyncedCommitMapping + Clone + 'static,
R,
>(
ctx: &'a CoreContext,
commit_syncer: &CommitSyncer<M, R>,
target_bookmark: &Target<BookmarkKey>,
parent_version: CommitSyncConfigVersion,
synced_ancestors_versions: &SyncedAncestorsVersions,
) -> Result<(CommitSyncConfigVersion, HashMap<ChangesetId, ChangesetId>), Error>
where
R: Repo,
{
// Killswitch to disable this logic altogether.
if let Ok(true) = justknobs::eval(
"scm/mononoke:xrepo_disable_forward_sync_over_mapping_change",
None,
None,
) {
return Ok((parent_version, HashMap::new()));
}
let target_repo = commit_syncer.get_target_repo();
// Value of the target bookmark. This is not part of a transaction and we're OK with the fact
// that it might be a bit stale.
let target_bookmark_csid = target_repo
.bookmarks()
.get(ctx.clone(), &target_bookmark.0)
.await?
.ok_or_else(|| anyhow!("Bookmark {} does not exist", target_bookmark.0))?;

let target_bookmark_version = if let Some(target_bookmark_version) = target_repo
.repo_cross_repo()
.synced_commit_mapping()
.get_large_repo_commit_version(ctx, target_repo.repo_identity().id(), target_bookmark_csid)
.await?
{
target_bookmark_version
} else {
// If we don't have a version for the target bookmark, we can't do anything.
return Ok((parent_version, HashMap::new()));
};

if parent_version == target_bookmark_version {
// If the parent version is the same as the target bookmark version we don't need
// to be smart: we can just use the parent version.
return Ok((parent_version, HashMap::new()));
}

// Let's see if we can find a better ancestor to use for the sync.
let mut parent_mapping = HashMap::new();
for (parent, _version) in synced_ancestors_versions.rewritten_ancestors.iter() {
// If the bookmark value is a descendant of our parent, it should have an equivalent working
// copy.
// TODO(mitrandir): we could actually trigger a sync and validate such equivalence instead
// of assuming it.
if target_repo
.commit_graph()
.is_ancestor(ctx, *parent, target_bookmark_csid)
.await?
{
parent_mapping.insert(*parent, target_bookmark_csid);
}
}

if parent_mapping.is_empty() {
// None of the parents are ancestors of the current position of target_bookmark. Perhaps
// our view of the target bookmark is stale. It's better to avoid changing the version.
Ok((parent_version, parent_mapping))
} else if parent_mapping.len() == 1 {
// There's exactly one parent that's an ancestor of target_bookmark.
// Let's assume that target_bookmark is still equivalent to what it represents.
Ok((target_bookmark_version, parent_mapping))
} else {
// There are at least two synced parents that are ancestors of target_bookmark. This
// practically means we have a diamond merge at hand.
Err(anyhow!(
"Diamond merges are not supported for pushrebase sync"
))
}
}

#[derive(Clone)]
pub enum CommitSyncRepos<R> {
LargeToSmall {
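
The tail of the new helper above boils down to three outcomes, depending on how many synced parents turn out to be ancestors of the bookmark head. A condensed, hypothetical sketch of that resolution (simplified types, not the real signatures):

use std::collections::HashMap;

type CsId = u64;
type Version = String;

fn resolve(
    parent_version: Version,
    bookmark_version: Version,
    parent_mapping: HashMap<CsId, CsId>,
) -> Result<(Version, HashMap<CsId, CsId>), String> {
    match parent_mapping.len() {
        // No parent is an ancestor of the bookmark head: our view of the bookmark
        // may be stale, so keep the parent's version and change nothing.
        0 => Ok((parent_version, parent_mapping)),
        // Exactly one parent is an ancestor of the bookmark head: rebase onto it
        // and adopt the "current" version associated with the bookmark.
        1 => Ok((bookmark_version, parent_mapping)),
        // Two or more synced parents behind the head effectively means a diamond
        // merge, which pushrebase sync does not support.
        _ => Err("Diamond merges are not supported for pushrebase sync".to_string()),
    }
}
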
1 change: 1 addition & 0 deletions eden/mononoke/commit_rewriting/cross_repo_sync/src/lib.rs
@@ -39,6 +39,7 @@ pub use commit_syncers_lib::create_commit_syncer_lease;
pub use commit_syncers_lib::create_commit_syncers;
pub use commit_syncers_lib::find_toposorted_unsynced_ancestors;
pub use commit_syncers_lib::find_toposorted_unsynced_ancestors_with_commit_graph;
pub use commit_syncers_lib::get_version_and_parent_map_for_sync_via_pushrebase;
pub use commit_syncers_lib::get_x_repo_submodule_metadata_file_prefx_from_config;
pub use commit_syncers_lib::rewrite_commit;
pub use commit_syncers_lib::update_mapping_with_version;
6 changes: 6 additions & 0 deletions eden/mononoke/commit_rewriting/cross_repo_sync/test/main.rs
@@ -205,6 +205,7 @@ where
PushrebaseRewriteDates::No,
version,
None,
Default::default(),
)
.await
}
@@ -1056,6 +1057,7 @@ async fn get_multiple_master_mapping_setup(
PushrebaseRewriteDates::No,
version.clone(),
None,
Default::default(),
)
.await
.expect("sync should have succeeded");
@@ -1069,6 +1071,7 @@ async fn get_multiple_master_mapping_setup(
PushrebaseRewriteDates::No,
version,
None,
Default::default(),
)
.await
.expect("sync should have succeeded");
@@ -1170,6 +1173,7 @@ async fn test_sync_no_op_pushrebase_has_multiple_mappings(fb: FacebookInit) -> R
PushrebaseRewriteDates::No,
version,
None,
Default::default(),
)
.await
.expect("sync should have succeeded");
@@ -1218,6 +1222,7 @@ async fn test_sync_real_pushrebase_has_multiple_mappings(fb: FacebookInit) -> Re
PushrebaseRewriteDates::No,
version,
None,
Default::default(),
)
.await
.expect("sync should have succeeded");
@@ -1590,6 +1595,7 @@ async fn test_disabled_sync_pushrebase(fb: FacebookInit) -> Result<(), Error> {
PushrebaseRewriteDates::No,
version,
None,
Default::default(),
)
.await
}
@@ -23,6 +23,7 @@ use commit_graph::CommitGraphRef;
use context::CoreContext;
use cross_repo_sync::find_toposorted_unsynced_ancestors;
use cross_repo_sync::find_toposorted_unsynced_ancestors_with_commit_graph;
use cross_repo_sync::get_version_and_parent_map_for_sync_via_pushrebase;
use cross_repo_sync::CandidateSelectionHint;
use cross_repo_sync::CommitSyncContext;
use cross_repo_sync::CommitSyncOutcome;
@@ -165,13 +166,13 @@ where
),
_ => None,
};
let (unsynced_ancestors, unsynced_ancestors_versions) =
let (unsynced_ancestors, synced_ancestors_versions) =
find_toposorted_unsynced_ancestors(ctx, commit_syncer, to_cs_id.clone(), hint).await?;

let version = if !unsynced_ancestors_versions.has_ancestor_with_a_known_outcome() {
let version = if !synced_ancestors_versions.has_ancestor_with_a_known_outcome() {
return Ok(SyncResult::SkippedNoKnownVersion);
} else {
let maybe_version = unsynced_ancestors_versions
let maybe_version = synced_ancestors_versions
.get_only_version()
.with_context(|| format!("failed to sync cs id {}", to_cs_id))?;
maybe_version.ok_or_else(|| {
@@ -199,6 +200,15 @@ where
check_forward_move(ctx, commit_syncer, to_cs_id, from_cs_id).await?;
}

let (version, parent_mapping) = get_version_and_parent_map_for_sync_via_pushrebase(
ctx,
commit_syncer,
target_bookmark,
version,
&synced_ancestors_versions,
)
.await?;

return sync_commits_via_pushrebase(
ctx,
commit_syncer,
Expand All @@ -210,6 +220,7 @@ where
pushrebase_rewrite_dates,
bookmark_update_timestamp,
unsafe_change_mapping_version_during_pushrebase,
parent_mapping,
)
.await
.map(SyncResult::Synced);
@@ -275,6 +286,7 @@ pub async fn sync_commits_via_pushrebase<M: SyncedCommitMapping + Clone + 'stati
pushrebase_rewrite_dates: PushrebaseRewriteDates,
bookmark_update_timestamp: Option<Timestamp>,
unsafe_change_mapping_version: &Option<CommitSyncConfigVersion>,
parent_mapping: HashMap<ChangesetId, ChangesetId>,
) -> Result<Vec<ChangesetId>, Error>
where
R: Repo,
@@ -340,6 +352,7 @@ where
pushrebase_rewrite_dates,
version.clone(),
change_mapping_version.clone(),
parent_mapping.clone(),
)
.timed()
.await;
@@ -691,6 +704,7 @@ async fn pushrebase_commit<M: SyncedCommitMapping + Clone + 'static, R>(
pushrebase_rewrite_dates: PushrebaseRewriteDates,
version: CommitSyncConfigVersion,
change_mapping_version: Option<CommitSyncConfigVersion>,
parent_mapping: HashMap<ChangesetId, ChangesetId>,
) -> Result<Option<ChangesetId>, Error>
where
R: Repo,
@@ -706,6 +720,7 @@ where
pushrebase_rewrite_dates,
version,
change_mapping_version,
parent_mapping,
)
.await
}
3 changes: 2 additions & 1 deletion eden/mononoke/tests/integration/just_knobs.json
@@ -17,7 +17,8 @@
"scm/mononoke:sql_disable_auto_retries": false,
"scm/mononoke:static-backend": false,
"scm/mononoke:disable_running_hooks_in_pushredirected_repo": false,
"scm/mononoke:xrepo_assign_large_repo_version_on_pushrebase": true
"scm/mononoke:xrepo_assign_large_repo_version_on_pushrebase": true,
"scm/mononoke:xrepo_disable_forward_sync_over_mapping_change": false
},
"ints": {
"scm/mononoke_timeouts:repo_client_clone_timeout_secs": 14400,