Enable setting timestamp on hash creation, improve paging api
Some checks failed
continuous-integration/drone/push Build is failing

This commit is contained in:
asonix 2023-08-28 18:43:24 -05:00
parent 24812afeba
commit 2b6ea97773
5 changed files with 43 additions and 12 deletions

View file

@@ -30,6 +30,13 @@ impl Details {
self.content_type.type_() == "video"
}
pub(crate) fn created_at(&self) -> time::OffsetDateTime {
match self.created_at {
MaybeHumanDate::OldDate(timestamp) => timestamp,
MaybeHumanDate::HumanDate(timestamp) => timestamp,
}
}
pub(crate) async fn from_bytes(timeout: u64, input: web::Bytes) -> Result<Self, Error> {
let DiscoveryLite {
format,

View file

@@ -585,6 +585,9 @@ struct PageQuery {
struct PageJson {
limit: usize,
#[serde(skip_serializing_if = "Option::is_none")]
current: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
prev: Option<String>,
@@ -609,7 +612,7 @@ async fn page(
) -> Result<HttpResponse, Error> {
let limit = limit.unwrap_or(20);
let page = repo.hash_page(slug, limit).await?;
let page = repo.hash_page(slug.clone(), limit).await?;
let mut hashes = Vec::with_capacity(page.hashes.len());
@@ -638,6 +641,7 @@ async fn page(
let page = PageJson {
limit: page.limit,
current: slug,
prev: page.prev(),
next: page.next(),
hashes,

View file

@@ -500,7 +500,7 @@ where
}
}
#[derive(Clone)]
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
pub(crate) struct OrderedHash {
timestamp: time::OffsetDateTime,
hash: Hash,
@@ -561,6 +561,16 @@ pub(crate) trait HashRepo: BaseRepo {
&self,
hash: Hash,
identifier: &dyn Identifier,
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
self.create_hash_with_timestamp(hash, identifier, time::OffsetDateTime::now_utc())
.await
}
async fn create_hash_with_timestamp(
&self,
hash: Hash,
identifier: &dyn Identifier,
timestamp: time::OffsetDateTime,
) -> Result<Result<(), HashAlreadyExists>, StoreError>;
async fn update_identifier(
@@ -616,12 +626,13 @@ where
T::hashes_ordered(self, bound, limit).await
}
async fn create_hash(
async fn create_hash_with_timestamp(
&self,
hash: Hash,
identifier: &dyn Identifier,
timestamp: time::OffsetDateTime,
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
T::create_hash(self, hash, identifier).await
T::create_hash_with_timestamp(self, hash, identifier, timestamp).await
}
async fn update_identifier(

View file

@@ -285,7 +285,9 @@ async fn do_migrate_hash_04<S: Store>(
let hash = Hash::new(hash, size, hash_details.internal_format());
let _ = new_repo.create_hash(hash.clone(), &identifier).await?;
let _ = new_repo
.create_hash_with_timestamp(hash.clone(), &identifier, hash_details.created_at())
.await?;
for alias in aliases {
let delete_token = old_repo

View file

@@ -1055,7 +1055,7 @@ impl HashRepo for SledRepo {
Some(ordered_hash) => {
let hash_bytes = serialize_ordered_hash(ordered_hash);
(
self.hashes_inverse.range(..hash_bytes.clone()),
self.hashes_inverse.range(..=hash_bytes.clone()),
Some(self.hashes_inverse.range(hash_bytes..)),
)
}
@@ -1067,21 +1067,27 @@ impl HashRepo for SledRepo {
.keys()
.rev()
.filter_map(|res| res.map(parse_ordered_hash).transpose())
.take(limit);
.take(limit + 1);
let prev = prev_iter
.and_then(|prev_iter| {
prev_iter
.keys()
.filter_map(|res| res.map(parse_ordered_hash).transpose())
.take(limit)
.take(limit + 1)
.last()
})
.transpose()?;
let hashes = page_iter.collect::<Result<Vec<_>, _>>()?;
let mut hashes = page_iter.collect::<Result<Vec<_>, _>>()?;
let next = hashes.last().cloned();
let next = if hashes.len() > limit {
hashes.pop()
} else {
None
};
let prev = if prev == bound { None } else { prev };
Ok(HashPage {
limit,
@@ -1099,10 +1105,11 @@ impl HashRepo for SledRepo {
}
#[tracing::instrument(level = "trace", skip(self))]
async fn create_hash(
async fn create_hash_with_timestamp(
&self,
hash: Hash,
identifier: &dyn Identifier,
timestamp: time::OffsetDateTime,
) -> Result<Result<(), HashAlreadyExists>, StoreError> {
let identifier: sled::IVec = identifier.to_bytes()?.into();
@@ -1111,7 +1118,7 @@ impl HashRepo for SledRepo {
let hash_identifiers = self.hash_identifiers.clone();
let created_key = serialize_ordered_hash(&OrderedHash {
timestamp: time::OffsetDateTime::now_utc(),
timestamp,
hash: hash.clone(),
});