Add package page caching.
Caches non-final pages of `projects/_/packages` listings when every package on the page is older than the `cache-releases-older-than` config value.
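For illustration only (a standalone sketch, not code from this patch; the helper name and signature are invented here), the rule for deciding whether a fetched page may be cached is:

    use time::{Duration, OffsetDateTime};

    // Hypothetical helper mirroring the condition this patch adds in
    // src/providers/gitlab.rs: a page is cacheable only if it is not the final
    // page (a `next` link exists) and every package on it was created before
    // `now - cache_releases_older_than`.
    fn page_is_cacheable(
        item_created_ats: &[OffsetDateTime],
        has_next_page: bool,
        cache_releases_older_than: Duration,
    ) -> bool {
        let now = OffsetDateTime::now_utc();
        has_next_page
            && item_created_ats
                .iter()
                .all(|created_at| *created_at + cache_releases_older_than < now)
    }

Since listings are requested with `sort=asc` and `order_by=created_at`, newly published packages appear on the final page, which is presumably why that page is never cached.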
Diff
 CHANGELOG.md            |   1 +
 Cargo.lock              |  13 +++++++------
 Cargo.toml              |   2 +-
 src/cache.rs            |   1 +
 src/providers/gitlab.rs | 106 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++---------------------
 5 files changed, 83 insertions(+), 40 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -11,6 +11,7 @@
 - Support crate yanking by creating a `yanked` file on the release.
 - Add `bust-cache` command, invoked via `ssh [registry] -- bust-cache [project] [crate-name] [crate-version]` to remove eligibility cache (ie. after a crate has been yanked)
 - Update dependencies, require libsodium at build & runtime.
+- Add package page caching. Controlled with config `cache-releases-older-than`.

 # v0.1.4
diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -466,9 +466,9 @@
 [[package]]
 name = "cpufeatures"
-version = "0.2.14"
+version = "0.2.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "608697df725056feaccfa42cffdaeeec3fccc4ffc38358ecd19b243e716a78e0"
+checksum = "0ca741a962e1b0bff6d724a1a0958b686406e853bb14061f218562e1896f95e6"
 dependencies = [
  "libc",
 ]
@@ -1910,18 +1910,18 @@
 [[package]]
 name = "serde"
-version = "1.0.214"
+version = "1.0.215"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f55c3193aca71c12ad7890f1785d2b73e1b9f63a0bbc353c08ef26fe03fc56b5"
+checksum = "6513c1ad0b11a9376da888e3e0baa0077f1aed55c17f50e7b2397136129fb88f"
 dependencies = [
  "serde_derive",
 ]

 [[package]]
 name = "serde_derive"
-version = "1.0.214"
+version = "1.0.215"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de523f781f095e28fa605cdce0f8307e451cc0fd14e2eb4cd2e98a355b147766"
+checksum = "ad1e866f866923f252f05c889987993144fb74e722403468a4ebd70c3cd756c0"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -2237,6 +2237,7 @@
 checksum = "5dfd88e563464686c916c7e46e623e520ddc6d79fa6641390f2e3fa86e83e885"
 dependencies = [
  "deranged",
+ "itoa",
  "num-conv",
  "powerfmt",
  "serde",
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -33,7 +33,7 @@
 smol_str = { version = "0.3", features = ["serde"] }
 thrussh = "0.35.6"
 thrussh-keys = "0.22"
-time = { version = "0.3", features = ["serde", "parsing"] }
+time = { version = "0.3", features = ["serde", "parsing", "formatting"] }
 tokio = { version = "1.17", features = ["full"] }
 tokio-util = { version = "0.7", features = ["codec"] }
 toml = "0.8"
diff --git a/src/cache.rs b/src/cache.rs
--- a/src/cache.rs
+++ b/src/cache.rs
@@ -38,6 +38,7 @@
 pub enum CacheKind {
     Eligibility = 1,
     CrateMetadata = 2,
+    PackagePage = 3,
 }
diff --git a/src/providers/gitlab.rs b/src/providers/gitlab.rs
--- a/src/providers/gitlab.rs
+++ b/src/providers/gitlab.rs
@@ -1,7 +1,7 @@
 #![allow(clippy::module_name_repetitions, clippy::blocks_in_conditions)]

 use crate::{
-    cache::{Cache, ConcreteCache, Yoked},
+    cache::{Cache, CacheKind, Cacheable, ConcreteCache, Yoked},
     config::{GitlabConfig, MetadataFormat},
     providers::{EligibilityCacheKey, Release, User},
 };
@@ -18,7 +18,7 @@
 use tokio::sync::Semaphore;
 use tracing::{debug, info_span, instrument, Instrument};
 use url::Url;
-use yoke::Yoke;
+use yoke::{Yoke, Yokeable};

 const PARALLEL_PACKAGE_FILES_GETS: usize = 32;
@@ -30,7 +30,7 @@
     metadata_format: MetadataFormat,
     admin_token: Option<String>,
     cache: ConcreteCache,
-    cache_checksums_older_than: Duration,
+    cache_releases_older_than: Duration,
 }

 impl Gitlab {
@@ -50,7 +50,7 @@
             metadata_format: config.metadata_format,
             admin_token: config.admin_token.clone(),
             cache,
-            cache_checksums_older_than: config.cache_releases_older_than,
+            cache_releases_older_than: config.cache_releases_older_than,
         })
     }
@@ -128,13 +128,13 @@
         let release = Some(Release {
             name: Cow::Owned(release.name.to_string()),
-            version: Cow::Owned(release.version.clone()),
+            version: Cow::Owned(release.version.to_string()),
             checksum: Cow::Owned(package_file.file_sha256),
             project: Cow::Owned(raw_project.to_string()),
             yanked,
         });

-        if package_file.created_at + self.cache_checksums_older_than < OffsetDateTime::now_utc() {
+        if package_file.created_at + self.cache_releases_older_than < OffsetDateTime::now_utc() {
             self.cache
                 .put(cache_key, &release)
                 .await
@@ -286,9 +286,10 @@
         ))?;
         {
             let mut query = uri.query_pairs_mut();
-            query.append_pair("per_page", itoa::Buffer::new().format(100u16));
+            query.append_pair("per_page", "100");
             query.append_pair("pagination", "keyset");
             query.append_pair("sort", "asc");
+            query.append_pair("order_by", "created_at");
             if do_as.token.is_none() {
                 query.append_pair("sudo", itoa::Buffer::new().format(do_as.id));
             }
@@ -300,31 +301,53 @@
         let futures = FuturesUnordered::new();
         while let Some(uri) = next_uri.take() {
-            let res = handle_error(
-                self.client
-                    .get(uri)
-                    .user_or_admin_token(do_as, &self.admin_token)
-                    .send_retry_429()
-                    .await?,
-            )
-            .await?;
+            let items = if let Some(page) = self.cache.get::<PackagePage>(uri.as_str()).await? {
+                let PackagePage { items, next } = page.get();
+                next_uri.clone_from(next);
+                items.clone()
+            } else {
+                let res = handle_error(
+                    self.client
+                        .get(uri.clone())
+                        .user_or_admin_token(do_as, &self.admin_token)
+                        .send_retry_429()
+                        .await?,
+                )
+                .await?;

-            if let Some(link_header) = res.headers().get(header::LINK) {
-                let mut link_header = parse_link_header::parse_with_rel(link_header.to_str()?)?;
-                if let Some(next) = link_header.remove("next") {
-                    next_uri = Some(next.raw_uri.parse()?);
+                let mut next = None::<Url>;
+                if let Some(link_header) = res.headers().get(header::LINK) {
+                    let mut link_header = parse_link_header::parse_with_rel(link_header.to_str()?)?;
+                    if let Some(next_link) = link_header.remove("next") {
+                        next = Some(next_link.raw_uri.parse()?);
+                    }
                 }
-            }

-            let res: Vec<_> = res
-                .json::<Vec<GitlabPackageResponse>>()
-                .await?
-                .into_iter()
-                .filter(|release| release.package_type == "generic")
-                .collect();
-            for release in res {
+                let items: Vec<_> = res
+                    .json::<Vec<GitlabPackageResponse>>()
+                    .await?
+                    .into_iter()
+                    .filter(|release| release.package_type == "generic")
+                    .collect();
+                let page = PackagePage { items, next };
+                if page.next.is_some()
+                    && page.items.iter().all(|item| {
+                        item.created_at + self.cache_releases_older_than < OffsetDateTime::now_utc()
+                    })
+                {
+                    self.cache.put(uri.as_str(), &page).await?;
+                }
+                next_uri = page.next;
+                page.items
+            };
+            for release in items {
                 let this = Arc::clone(&self);
                 let do_as = Arc::clone(do_as);
                 let fetch_concurrency = &fetch_concurrency;
@@ -501,17 +524,19 @@
     pub file_sha256: String,
 }

-#[derive(Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct GitlabPackageResponse {
     pub id: u64,
-    pub name: String,
-    pub version: String,
-    pub package_type: String,
+    pub name: SmolStr,
+    pub version: SmolStr,
+    pub package_type: SmolStr,
+    #[serde(with = "time::serde::rfc3339")]
+    pub created_at: time::OffsetDateTime,
     #[serde(rename = "_links")]
     pub links: GitlabPackageLinksResponse,
 }

-#[derive(Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize)]
 pub struct GitlabPackageLinksResponse {
     web_path: String,
 }
@@ -576,5 +601,20 @@
             }
             return Ok(r);
         }
     }
 }
+
+#[derive(Debug, Serialize, Deserialize, Yokeable)]
+pub struct PackagePage {
+    pub items: Vec<GitlabPackageResponse>,
+    pub next: Option<Url>,
+}
+
+impl Cacheable for PackagePage {
+    type Key<'b> = &'b str;
+    const KIND: CacheKind = CacheKind::PackagePage;
+
+    fn format_key(out: &mut Vec<u8>, k: Self::Key<'_>) {
+        out.extend_from_slice(k.as_bytes());
+    }
+}