Reduce reliance on futures-util

asonix 2023-08-23 11:59:42 -05:00
parent fb3ba0f3cd
commit 8f50a15b25
18 changed files with 79 additions and 41 deletions

Cargo.lock (generated)

@@ -1743,6 +1743,7 @@ dependencies = [
  "console-subscriber",
  "dashmap",
  "flume",
+ "futures-core",
  "futures-util",
  "hex",
  "md-5",


@@ -27,7 +27,8 @@ config = "0.13.0"
 console-subscriber = "0.1"
 dashmap = "5.1.0"
 flume = "0.11.0"
-futures-util = "0.3.17"
+futures-core = "0.3"
+futures-util = { version = "0.3.17", default-features = false }
 hex = "0.4.3"
 md-5 = "0.10.5"
 metrics = "0.21.1"


@@ -4,7 +4,8 @@ use crate::{
     store::Store,
 };
 use actix_web::web::Bytes;
-use futures_util::{Stream, TryStreamExt};
+use futures_core::Stream;
+use futures_util::TryStreamExt;
 use mime::APPLICATION_OCTET_STREAM;
 use tracing::{Instrument, Span};


@@ -2,7 +2,7 @@ use actix_web::{
     body::MessageBody,
     web::{Bytes, BytesMut},
 };
-use futures_util::Stream;
+use futures_core::Stream;
 use std::{
     collections::{vec_deque::IntoIter, VecDeque},
     convert::Infallible,


@@ -12,7 +12,7 @@ use crate::{
     process::Process,
 };
 use actix_web::web::Bytes;
-use futures_util::Stream;
+use futures_core::Stream;
 use tokio::io::AsyncReadExt;
 use super::{Discovery, DiscoveryLite};


@@ -2,7 +2,7 @@
 mod tests;
 use actix_web::web::Bytes;
-use futures_util::Stream;
+use futures_core::Stream;
 use tokio::io::AsyncReadExt;
 use crate::{


@ -1,4 +1,4 @@
use futures_util::stream::Stream; use futures_core::Stream;
use std::{ use std::{
pin::Pin, pin::Pin,
task::{Context, Poll}, task::{Context, Poll},


@@ -6,9 +6,10 @@ pub(crate) use tokio_file::File;
 #[cfg(not(feature = "io-uring"))]
 mod tokio_file {
-    use crate::{store::file_store::FileError, Either};
+    use crate::{store::file_store::FileError, stream::IntoStreamer, Either};
     use actix_web::web::{Bytes, BytesMut};
-    use futures_util::{Stream, StreamExt, TryStreamExt};
+    use futures_core::Stream;
+    use futures_util::TryStreamExt;
     use std::{io::SeekFrom, path::Path};
     use tokio::io::{AsyncRead, AsyncReadExt, AsyncSeekExt, AsyncWrite, AsyncWriteExt};
     use tokio_util::codec::{BytesCodec, FramedRead};
@@ -43,7 +44,8 @@ mod tokio_file {
     where
         S: Stream<Item = std::io::Result<Bytes>>,
     {
-        futures_util::pin_mut!(stream);
+        let stream = std::pin::pin!(stream);
+        let mut stream = stream.into_streamer();
         while let Some(res) = stream.next().await {
             let mut bytes = res?;
@@ -102,9 +104,9 @@ mod tokio_file {
 #[cfg(feature = "io-uring")]
 mod io_uring {
-    use crate::store::file_store::FileError;
+    use crate::{store::file_store::FileError, stream::IntoStreamer};
     use actix_web::web::{Bytes, BytesMut};
-    use futures_util::stream::{Stream, StreamExt};
+    use futures_core::Stream;
     use std::{
         convert::TryInto,
         fs::Metadata,
@@ -181,7 +183,8 @@ mod io_uring {
     where
         S: Stream<Item = std::io::Result<Bytes>>,
     {
-        futures_util::pin_mut!(stream);
+        let stream = std::pin::pin!(stream);
+        let mut stream = stream.into_streamer();
         let mut cursor: u64 = 0;
         while let Some(res) = stream.next().await {
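
The into_streamer() calls above come from pict-rs's crate::stream::IntoStreamer helper, whose definition is not part of this diff. As a loose sketch only, assuming it is an extension trait that wraps a stream so it can be driven with an async next(), it could look roughly like this (the real trait may differ, for example in how it handles non-Unpin streams):

use futures_core::Stream;
use std::pin::Pin;

// Assumed shape only: the real IntoStreamer/Streamer in pict-rs are not shown
// in this commit. The goal is an async next() without futures-util's StreamExt.
pub(crate) trait IntoStreamer: Stream {
    fn into_streamer(self) -> Streamer<Self>
    where
        Self: Sized + Unpin,
    {
        Streamer(self)
    }
}

impl<S: Stream> IntoStreamer for S {}

pub(crate) struct Streamer<S>(S);

impl<S: Stream + Unpin> Streamer<S> {
    pub(crate) async fn next(&mut self) -> Option<S::Item> {
        // poll_fn drives poll_next directly; Pin::new is fine because S: Unpin.
        std::future::poll_fn(|cx| Pin::new(&mut self.0).poll_next(cx)).await
    }
}

This would also explain the pairing with std::pin::pin! above: pinning first turns any stream into a Pin<&mut S> handle, which is itself an Unpin stream the wrapper can poll.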


@@ -5,9 +5,10 @@ use crate::{
     formats::{InternalFormat, Validations},
     repo::{Alias, ArcRepo, DeleteToken, Hash},
     store::Store,
+    stream::IntoStreamer,
 };
 use actix_web::web::Bytes;
-use futures_util::{Stream, StreamExt};
+use futures_core::Stream;
 use tracing::{Instrument, Span};
 mod hasher;
@@ -26,12 +27,14 @@
 }
 #[tracing::instrument(skip(stream))]
-async fn aggregate<S>(mut stream: S) -> Result<Bytes, Error>
+async fn aggregate<S>(stream: S) -> Result<Bytes, Error>
 where
     S: Stream<Item = Result<Bytes, Error>> + Unpin,
 {
     let mut buf = BytesStream::new();
+    let mut stream = stream.into_streamer();
     while let Some(res) = stream.next().await {
         buf.add_bytes(res?);
     }
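
For reference, the same buffering pattern can be written against futures-core alone. BytesStream and the crate's Error type are pict-rs internals not shown here, so this hedged sketch (not part of the commit) collects into a plain BytesMut instead:

use actix_web::web::{Bytes, BytesMut};
use futures_core::Stream;

// Illustrative only: collect a stream of Bytes chunks into one buffer using
// std::pin::pin! and std::future::poll_fn instead of futures-util's StreamExt.
async fn aggregate_sketch<S, E>(stream: S) -> Result<Bytes, E>
where
    S: Stream<Item = Result<Bytes, E>>,
{
    let mut stream = std::pin::pin!(stream);
    let mut buf = BytesMut::new();

    while let Some(res) = std::future::poll_fn(|cx| stream.as_mut().poll_next(cx)).await {
        buf.extend_from_slice(&res?);
    }

    Ok(buf.freeze())
}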


@@ -34,10 +34,8 @@ use actix_web::{
     http::header::{CacheControl, CacheDirective, LastModified, Range, ACCEPT_RANGES},
     web, App, HttpRequest, HttpResponse, HttpResponseBuilder, HttpServer,
 };
-use futures_util::{
-    stream::{empty, once},
-    Stream, StreamExt, TryStreamExt,
-};
+use futures_core::Stream;
+use futures_util::{StreamExt, TryStreamExt};
 use metrics_exporter_prometheus::PrometheusBuilder;
 use middleware::Metrics;
 use once_cell::sync::Lazy;
@@ -46,7 +44,6 @@ use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
 use reqwest_tracing::TracingMiddleware;
 use rusty_s3::UrlStyle;
 use std::{
-    future::ready,
     path::Path,
     path::PathBuf,
     sync::Arc,
@@ -72,7 +69,7 @@ use self::{
     repo::{sled::SledRepo, Alias, DeleteToken, Hash, Repo, UploadId, UploadResult},
     serde_str::Serde,
     store::{file_store::FileStore, object_store::ObjectStore, Identifier, Store},
-    stream::{StreamLimit, StreamTimeout},
+    stream::{empty, once, StreamLimit, StreamTimeout},
 };
 pub use self::config::{ConfigSource, PictRsConfiguration};
@@ -845,12 +842,9 @@ async fn process<S: Store + 'static>(
             return Err(UploadError::Range.into());
         }
     } else if not_found {
-        (
-            HttpResponse::NotFound(),
-            Either::right(once(ready(Ok(bytes)))),
-        )
+        (HttpResponse::NotFound(), Either::right(once(Ok(bytes))))
     } else {
-        (HttpResponse::Ok(), Either::right(once(ready(Ok(bytes)))))
+        (HttpResponse::Ok(), Either::right(once(Ok(bytes))))
     };
     Ok(srv_response(


@@ -1,4 +1,3 @@
-use futures_util::StreamExt;
 use std::{
     rc::Rc,
     sync::atomic::{AtomicU64, Ordering},
@@ -10,6 +9,7 @@ use crate::{
     error::{Error, UploadError},
     repo::{ArcRepo, Hash},
     store::{Identifier, Store},
+    stream::IntoStreamer,
 };
 pub(super) async fn migrate_store<S1, S2>(
@@ -103,8 +103,7 @@
     }
     // Hashes are read in a consistent order
-    let stream = repo.hashes().await;
-    let mut stream = Box::pin(stream);
+    let mut stream = repo.hashes().await.into_streamer();
     let state = Rc::new(MigrateState {
         repo: repo.clone(),


@@ -5,8 +5,8 @@ use crate::{
     repo::{Alias, ArcRepo, DeleteToken, Hash},
     serde_str::Serde,
     store::{Identifier, Store},
+    stream::IntoStreamer,
 };
-use futures_util::StreamExt;
 pub(super) fn perform<'a, S>(
     repo: &'a ArcRepo,
@@ -136,7 +136,7 @@ async fn alias(repo: &ArcRepo, alias: Alias, token: DeleteToken) -> Result<(), E
 #[tracing::instrument(skip_all)]
 async fn all_variants(repo: &ArcRepo) -> Result<(), Error> {
-    let mut hash_stream = Box::pin(repo.hashes().await);
+    let mut hash_stream = repo.hashes().await.into_streamer();
     while let Some(res) = hash_stream.next().await {
         let hash = res?;
@@ -151,7 +151,7 @@ async fn outdated_variants(repo: &ArcRepo, config: &Configuration) -> Result<(),
     let now = time::OffsetDateTime::now_utc();
     let since = now.saturating_sub(config.media.retention.variants.to_duration());
-    let mut variant_stream = Box::pin(repo.older_variants(since).await?);
+    let mut variant_stream = repo.older_variants(since).await?.into_streamer();
     while let Some(res) = variant_stream.next().await {
         let (hash, variant) = res?;
@@ -166,7 +166,7 @@ async fn outdated_proxies(repo: &ArcRepo, config: &Configuration) -> Result<(),
     let now = time::OffsetDateTime::now_utc();
     let since = now.saturating_sub(config.media.retention.proxy.to_duration());
-    let mut alias_stream = Box::pin(repo.older_aliases(since).await?);
+    let mut alias_stream = repo.older_aliases(since).await?.into_streamer();
     while let Some(res) = alias_stream.next().await {
         let alias = res?;


@@ -1,13 +1,13 @@
 use crate::{
     error::{Error, UploadError},
     store::Store,
+    stream::once,
 };
 use actix_web::{
     http::header::{ByteRangeSpec, ContentRange, ContentRangeSpec, Range},
     web::Bytes,
 };
-use futures_util::stream::{once, Stream};
-use std::future::ready;
+use futures_core::Stream;
 pub(crate) fn chop_bytes(
     byte_range: &ByteRangeSpec,
@@ -17,7 +17,7 @@ pub(crate) fn chop_bytes(
     if let Some((start, end)) = byte_range.to_satisfiable_range(length) {
         // END IS INCLUSIVE
         let end = end as usize + 1;
-        return Ok(once(ready(Ok(bytes.slice(start as usize..end)))));
+        return Ok(once(Ok(bytes.slice(start as usize..end))));
     }
     Err(UploadError::Range.into())


@@ -4,7 +4,7 @@ use crate::{
     repo::{Alias, DeleteToken},
     store::{Identifier, StoreError},
 };
-use futures_util::Stream;
+use futures_core::Stream;
 use std::fmt::Debug;
 pub(crate) use self::sled::SledRepo;


@@ -1,6 +1,6 @@
 use actix_web::web::Bytes;
 use base64::{prelude::BASE64_STANDARD, Engine};
-use futures_util::stream::Stream;
+use futures_core::Stream;
 use std::{fmt::Debug, sync::Arc};
 use tokio::io::{AsyncRead, AsyncWrite};


@@ -4,7 +4,7 @@ use crate::{
     store::Store,
 };
 use actix_web::web::Bytes;
-use futures_util::stream::Stream;
+use futures_core::Stream;
 use std::{
     path::{Path, PathBuf},
     pin::Pin,


@@ -2,6 +2,7 @@ use crate::{
     bytes_stream::BytesStream,
     repo::{Repo, SettingsRepo},
     store::Store,
+    stream::IntoStreamer,
 };
 use actix_rt::task::JoinError;
 use actix_web::{
@@ -13,7 +14,8 @@ use actix_web::{
     web::Bytes,
 };
 use base64::{prelude::BASE64_STANDARD, Engine};
-use futures_util::{Stream, StreamExt, TryStreamExt};
+use futures_core::Stream;
+use futures_util::TryStreamExt;
 use reqwest::{header::RANGE, Body, Response};
 use reqwest_middleware::{ClientWithMiddleware, RequestBuilder};
 use rusty_s3::{actions::S3Action, Bucket, BucketError, Credentials, UrlStyle};
@@ -143,6 +145,8 @@
 {
     let mut buf = BytesStream::new();
+    let mut stream = stream.into_streamer();
     while buf.len() < CHUNK_SIZE {
         if let Some(res) = stream.next().await {
             buf.add_bytes(res?)
@@ -404,7 +408,7 @@ impl Store for ObjectStore {
         ));
     }
-    let mut stream = response.bytes_stream();
+    let mut stream = response.bytes_stream().into_streamer();
     while let Some(res) = stream.next().await {
         let mut bytes = res.map_err(payload_to_io_error)?;


@@ -1,8 +1,9 @@
 use actix_rt::{task::JoinHandle, time::Sleep};
 use actix_web::web::Bytes;
-use futures_util::Stream;
+use futures_core::Stream;
 use std::{
     future::Future,
+    marker::PhantomData,
     pin::Pin,
     sync::{
         atomic::{AtomicBool, Ordering},
@@ -12,6 +13,37 @@ use std::{
     time::Duration,
 };
 
+pub(crate) struct Empty<T>(PhantomData<T>);
+
+impl<T> Stream for Empty<T> {
+    type Item = T;
+
+    fn poll_next(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        Poll::Ready(None)
+    }
+}
+
+pub(crate) fn empty<T>() -> Empty<T> {
+    Empty(PhantomData)
+}
+
+pub(crate) struct Once<T>(Option<T>);
+
+impl<T> Stream for Once<T>
+where
+    T: Unpin,
+{
+    type Item = T;
+
+    fn poll_next(mut self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Option<Self::Item>> {
+        Poll::Ready(self.0.take())
+    }
+}
+
+pub(crate) fn once<T>(value: T) -> Once<T> {
+    Once(Some(value))
+}
+
 pub(crate) type LocalBoxStream<'a, T> = Pin<Box<dyn Stream<Item = T> + 'a>>;
 
 pub(crate) trait StreamLimit {
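
The Unpin bound on Once lets poll_next take the stored value through a plain mutable reference. Below is an illustrative check (not part of the commit) of the two new helpers, assuming the empty()/once() definitions from the hunk above are in scope. Unlike futures_util::stream::once, which takes a Future (hence the std::future::ready wrappers removed elsewhere in this commit), this once() takes the value itself.

use actix_web::web::Bytes;
use futures_core::Stream;

// Illustrative only: drive the streams by hand with std::pin::pin! and
// std::future::poll_fn, the same futures-util-free pattern used above.
async fn demo_stream_helpers() {
    // once(value) yields the value exactly once, then signals the end.
    let mut one = std::pin::pin!(once(Bytes::from_static(b"hello")));
    let first = std::future::poll_fn(|cx| one.as_mut().poll_next(cx)).await;
    assert_eq!(first.as_deref(), Some(&b"hello"[..]));
    let second = std::future::poll_fn(|cx| one.as_mut().poll_next(cx)).await;
    assert!(second.is_none());

    // empty() ends immediately without yielding anything.
    let mut none = std::pin::pin!(empty::<Bytes>());
    let item = std::future::poll_fn(|cx| none.as_mut().poll_next(cx)).await;
    assert!(item.is_none());
}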