Auto merge of #11255 - epage:registry, r=arlosi

fix(publish): Check remote git registry more than once post-publish

With `publish-timeout` on remote git registries, we were checking once and never checking again.  There were 3 layers of guards preventing the cache from being updated
- `needs_update`, handled via `invalidate_cache`
- `config.updated_sources()`, handled by removing from the HashSet
- `updated`, inaccessible

This change collapses `updated` into `config.updated_sources()`, allowing the cache to be updated

Tested by publishing `epage-publish-test`.  After about 7 registry updates, it succeeded.  Before, it just hit the timeout of 60s.  No tests are added as we don't have the plumbing set up to be able to control when the test remote git registry publishes.  All of our tests focus on the remote http registry.

Fixes #11253
This commit is contained in:
bors 2022-10-20 11:17:03 +00:00
commit a6e72d0a27
1 changed files with 20 additions and 15 deletions

View File

@ -32,7 +32,6 @@ pub struct RemoteRegistry<'cfg> {
head: Cell<Option<git2::Oid>>,
current_sha: Cell<Option<InternedString>>,
needs_update: bool, // Does this registry need to be updated?
updated: bool, // Has this registry been updated this session?
}
impl<'cfg> RemoteRegistry<'cfg> {
@ -49,7 +48,6 @@ impl<'cfg> RemoteRegistry<'cfg> {
head: Cell::new(None),
current_sha: Cell::new(None),
needs_update: false,
updated: false,
}
}
@ -141,6 +139,14 @@ impl<'cfg> RemoteRegistry<'cfg> {
self.current_sha.set(Some(sha));
Some(sha)
}
/// Whether this registry's index has already been updated during this
/// session, as recorded in the session-wide `updated_sources` set held
/// by `config` (keyed by this registry's `source_id`).
fn is_updated(&self) -> bool {
self.config.updated_sources().contains(&self.source_id)
}
/// Records that this registry's index has been updated this session by
/// inserting its `source_id` into the session-wide `updated_sources` set,
/// so later calls to `is_updated` return `true` and skip redundant fetches.
fn mark_updated(&self) {
self.config.updated_sources().insert(self.source_id);
}
}
const LAST_UPDATED_FILE: &str = ".last-updated";
@ -214,7 +220,7 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
match load_helper(&self, path, index_version) {
Ok(result) => Poll::Ready(Ok(result)),
Err(_) if !self.updated => {
Err(_) if !self.is_updated() => {
// If git returns an error and we haven't updated the repo, return
// pending to allow an update to try again.
self.needs_update = true;
@ -250,21 +256,22 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
return Ok(());
}
self.updated = true;
self.needs_update = false;
// Make sure the index is only updated once per session since it is an
// expensive operation. This generally only happens when the resolver
// is run multiple times, such as during `cargo publish`.
if self.is_updated() {
return Ok(());
}
self.mark_updated();
if self.config.offline() {
return Ok(());
}
if self.config.cli_unstable().no_index_update {
return Ok(());
}
// Make sure the index is only updated once per session since it is an
// expensive operation. This generally only happens when the resolver
// is run multiple times, such as during `cargo publish`.
if self.config.updated_sources().contains(&self.source_id) {
return Ok(());
}
debug!("updating the index");
@ -291,7 +298,6 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
let repo = self.repo.borrow_mut().unwrap();
git::fetch(repo, url.as_str(), &self.index_git_ref, self.config)
.with_context(|| format!("failed to fetch `{}`", url))?;
self.config.updated_sources().insert(self.source_id);
// Create a dummy file to record the mtime for when we updated the
// index.
@ -301,13 +307,12 @@ impl<'cfg> RegistryData for RemoteRegistry<'cfg> {
}
fn invalidate_cache(&mut self) {
if !self.updated {
self.needs_update = true;
}
// To fully invalidate, undo `mark_updated`s work
self.needs_update = true;
}
fn is_updated(&self) -> bool {
self.updated
self.is_updated()
}
fn download(&mut self, pkg: PackageId, checksum: &str) -> CargoResult<MaybeLock> {