Compare commits

...

19 commits

Author SHA1 Message Date
asonix 97567cf598 Prepare v0.3.113
All checks were successful
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m26s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 3m34s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m40s
/ clippy (push) Successful in 1m40s
/ build (map[artifact:linux-amd64 platform:linux/amd64 target:x86_64-unknown-linux-musl]) (push) Successful in 6m22s
/ tests (push) Successful in 2m19s
/ publish-crate (push) Successful in 2m25s
/ build (map[artifact:linux-arm32v7 platform:linux/arm/v7 target:armv7-unknown-linux-musleabihf]) (push) Successful in 6m20s
/ publish-forgejo (push) Successful in 23s
/ publish-docker (push) Successful in 29s
/ build (map[artifact:linux-arm64v8 platform:linux/arm64 target:aarch64-unknown-linux-musl]) (push) Successful in 3m56s
2024-05-01 15:45:53 -05:00
asonix 4c663f399e Update dependencies (minor & point) 2024-05-01 15:43:53 -05:00
asonix 8a3256f52a Avoid deadlock of iterating over tree while transacting on that tree 2024-05-01 15:43:08 -05:00
asonix 13a2653fe8 Remove prerelease flag
All checks were successful
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 3m32s
/ clippy (push) Successful in 1m55s
/ tests (push) Successful in 2m43s
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m22s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m45s
2024-04-23 14:00:04 -05:00
asonix 8dd9a86d22 Use match_pattern rather than path for metrics differentiation
All checks were successful
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 2m34s
/ check (x86_64-unknown-linux-musl) (push) Successful in 2m28s
/ check (aarch64-unknown-linux-musl) (push) Successful in 2m31s
/ clippy (push) Successful in 2m47s
/ tests (push) Successful in 3m10s
/ publish-docker (push) Successful in 14s
/ publish-forgejo (push) Successful in 20s
/ publish-crate (push) Successful in 2m27s
/ build (map[artifact:linux-arm64v8 platform:linux/arm64 target:aarch64-unknown-linux-musl]) (push) Successful in 4m21s
/ build (map[artifact:linux-amd64 platform:linux/amd64 target:x86_64-unknown-linux-musl]) (push) Successful in 5m37s
/ build (map[artifact:linux-arm32v7 platform:linux/arm/v7 target:armv7-unknown-linux-musleabihf]) (push) Successful in 5m27s
2024-04-21 11:44:16 -05:00
asonix 5c0c0591dd Prepare 0.3.112
All checks were successful
/ clippy (push) Successful in 1m24s
/ tests (push) Successful in 2m2s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 2m33s
/ check (x86_64-unknown-linux-musl) (push) Successful in 2m26s
/ check (aarch64-unknown-linux-musl) (push) Successful in 2m38s
2024-04-14 22:47:38 -05:00
asonix 04ca4e5401 Stable async-cpupool
All checks were successful
/ clippy (push) Successful in 2m18s
/ tests (push) Successful in 3m15s
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m29s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 3m35s
/ check (x86_64-unknown-linux-musl) (push) Successful in 2m42s
2024-04-14 19:53:31 -05:00
asonix 1de1d76506 prerelease
All checks were successful
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m22s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 3m39s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m7s
/ clippy (push) Successful in 1m30s
/ tests (push) Successful in 2m6s
/ build (map[artifact:linux-amd64 platform:linux/amd64 target:x86_64-unknown-linux-musl]) (push) Successful in 3m46s
/ publish-docker (push) Successful in 14s
/ publish-forgejo (push) Successful in 19s
/ publish-crate (push) Successful in 2m28s
/ build (map[artifact:linux-arm32v7 platform:linux/arm/v7 target:armv7-unknown-linux-musleabihf]) (push) Successful in 3m46s
/ build (map[artifact:linux-arm64v8 platform:linux/arm64 target:aarch64-unknown-linux-musl]) (push) Successful in 3m48s
2024-04-13 13:57:12 -05:00
asonix dd9225bb89 Prepare v0.3.111
All checks were successful
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m33s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m52s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 3m37s
/ clippy (push) Successful in 1m41s
/ tests (push) Successful in 2m20s
/ publish-docker (push) Successful in 13s
/ publish-forgejo (push) Successful in 19s
/ publish-crate (push) Successful in 2m26s
/ build (map[artifact:linux-arm64v8 platform:linux/arm64 target:aarch64-unknown-linux-musl]) (push) Successful in 3m53s
/ build (map[artifact:linux-arm32v7 platform:linux/arm/v7 target:armv7-unknown-linux-musleabihf]) (push) Successful in 6m15s
/ build (map[artifact:linux-amd64 platform:linux/amd64 target:x86_64-unknown-linux-musl]) (push) Successful in 7m44s
2024-04-07 11:53:24 -05:00
asonix b577730836 Fix build
Some checks failed
/ clippy (push) Successful in 1m18s
/ check (aarch64-unknown-linux-musl) (push) Successful in 2m31s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 2m32s
/ tests (push) Successful in 2m2s
/ check (x86_64-unknown-linux-musl) (push) Has been cancelled
2024-04-07 11:40:57 -05:00
asonix 21883c168b BROKEN! Start collecting more metrics about various sizes 2024-04-07 11:04:03 -05:00
asonix 76a0c79369 Update base64, ammonia
All checks were successful
/ check (aarch64-unknown-linux-musl) (push) Successful in 2m32s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 2m31s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m28s
/ tests (push) Successful in 2m18s
/ publish-docker (push) Successful in 15s
/ clippy (push) Successful in 1m48s
/ build (map[artifact:linux-arm32v7 platform:linux/arm/v7 target:armv7-unknown-linux-musleabihf]) (push) Successful in 6m15s
/ publish-forgejo (push) Successful in 22s
/ publish-crate (push) Successful in 2m28s
/ build (map[artifact:linux-amd64 platform:linux/amd64 target:x86_64-unknown-linux-musl]) (push) Successful in 6m15s
/ build (map[artifact:linux-arm64v8 platform:linux/arm64 target:aarch64-unknown-linux-musl]) (push) Successful in 3m53s
2024-04-06 13:42:29 -05:00
asonix 6444782db9 Bump version, Update dependencies (minor & point) 2024-04-06 13:34:54 -05:00
asonix 14aea3256d Update dependencies (minor & point)
All checks were successful
/ clippy (push) Successful in 2m5s
/ tests (push) Successful in 3m15s
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m40s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 5m14s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m46s
2024-03-23 19:10:13 -05:00
asonix f4f2aa2025 Update flake 2024-03-23 19:09:53 -05:00
asonix 615271fe80 Update opentelemetry dependencies, other dependencies minor & point
All checks were successful
/ clippy (push) Successful in 2m20s
/ tests (push) Successful in 3m10s
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m38s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 3m29s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m32s
2024-03-10 20:09:16 -05:00
asonix 4aed601664 No console by default
All checks were successful
/ tests (push) Successful in 2m2s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 2m31s
/ check (x86_64-unknown-linux-musl) (push) Successful in 2m26s
/ clippy (push) Successful in 1m25s
/ check (aarch64-unknown-linux-musl) (push) Successful in 2m31s
2024-02-25 21:08:17 -06:00
asonix bf21f05aca Strip release binaries
All checks were successful
/ clippy (push) Successful in 2m22s
/ tests (push) Successful in 3m17s
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m34s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m40s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 3m45s
2024-02-12 15:16:20 -06:00
asonix e69f6c6edb Remove prerelease marker
All checks were successful
/ tests (push) Successful in 3m4s
/ check (aarch64-unknown-linux-musl) (push) Successful in 3m26s
/ check (armv7-unknown-linux-musleabihf) (push) Successful in 3m36s
/ check (x86_64-unknown-linux-musl) (push) Successful in 3m39s
/ clippy (push) Successful in 2m12s
2024-02-12 14:32:46 -06:00
12 changed files with 589 additions and 493 deletions

View file

@@ -207,7 +207,6 @@ jobs:
direction: upload
token: ${{ secrets.GITHUB_TOKEN }}
release-dir: artifacts/
prerelease: true
publish-crate:
needs: [build]

837
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -1,7 +1,7 @@
[package]
name = "ap-relay"
description = "A simple activitypub relay"
version = "0.3.109"
version = "0.3.113"
authors = ["asonix <asonix@asonix.dog>"]
license = "AGPL-3.0"
readme = "README.md"
@@ -14,6 +14,9 @@ build = "src/build.rs"
name = "relay"
path = "src/main.rs"
[profile.release]
strip = true
[features]
console = ["dep:console-subscriber"]
default = []
@@ -25,17 +28,16 @@ actix-web = { version = "4.4.0", default-features = false, features = ["compress
actix-webfinger = { version = "0.5.0", default-features = false }
activitystreams = "0.7.0-alpha.25"
activitystreams-ext = "0.1.0-alpha.3"
ammonia = "3.1.0"
async-cpupool = "0.2.0"
ammonia = "4.0.0"
async-cpupool = "0.2.2"
bcrypt = "0.15"
base64 = "0.21"
base64 = "0.22"
clap = { version = "4.0.0", features = ["derive"] }
color-eyre = "0.6.2"
config = { version = "0.14.0", default-features = false, features = ["toml", "json", "yaml"] }
console-subscriber = { version = "0.2", optional = true }
dashmap = "5.1.0"
dotenv = "0.15.0"
flume = "0.11.0"
lru = "0.12.0"
metrics = "0.22.0"
metrics-exporter-prometheus = { version = "0.13.0", default-features = false, features = [
@@ -44,9 +46,9 @@ metrics-exporter-prometheus = { version = "0.13.0", default-features = false, fe
metrics-util = "0.16.0"
mime = "0.3.16"
minify-html = "0.15.0"
opentelemetry = "0.21"
opentelemetry_sdk = { version = "0.21", features = ["rt-tokio"] }
opentelemetry-otlp = "0.14"
opentelemetry = "0.22"
opentelemetry_sdk = { version = "0.22", features = ["rt-tokio"] }
opentelemetry-otlp = "0.15"
pin-project-lite = "0.2.9"
# pinned to metrics-util
quanta = "0.12.0"
@@ -55,7 +57,7 @@ reqwest = { version = "0.11", default-features = false, features = ["rustls-tls"
reqwest-middleware = "0.2"
reqwest-tracing = "0.4.5"
ring = "0.17.5"
rsa = { version = "0.9" }
rsa = "0.9"
rsa-magic-public-key = "0.8.0"
rustls = "0.22.0"
rustls-channel-resolver = "0.2.0"
@@ -74,7 +76,7 @@ time = { version = "0.3.17", features = ["serde"] }
tracing = "0.1"
tracing-error = "0.2"
tracing-log = "0.2"
tracing-opentelemetry = "0.22"
tracing-opentelemetry = "0.23"
tracing-subscriber = { version = "0.3", features = [
"ansi",
"env-filter",

View file

@@ -5,11 +5,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"lastModified": 1710146030,
"narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
"type": "github"
},
"original": {
@@ -20,11 +20,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1705133751,
"narHash": "sha256-rCIsyE80jgiOU78gCWN3A0wE0tR2GI5nH6MlS+HaaSQ=",
"lastModified": 1711163522,
"narHash": "sha256-YN/Ciidm+A0fmJPWlHBGvVkcarYWSC+s3NTPk/P+q3c=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "9b19f5e77dd906cb52dade0b7bd280339d2a1f3d",
"rev": "44d0940ea560dee511026a53f0e2e2cde489b4d4",
"type": "github"
},
"original": {

View file

@@ -5,7 +5,7 @@
rustPlatform.buildRustPackage {
pname = "relay";
version = "0.3.109";
version = "0.3.113";
src = ./.;
cargoLock.lockFile = ./Cargo.lock;

View file

@@ -15,6 +15,10 @@ const MINUTES: u64 = 60 * SECONDS;
const HOURS: u64 = 60 * MINUTES;
const DAYS: u64 = 24 * HOURS;
pub(crate) fn recordable(len: usize) -> u32 {
((len as u64) % u64::from(u32::MAX)) as u32
}
type DistributionMap = BTreeMap<Vec<(String, String)>, Summary>;
#[derive(Clone)]
@@ -299,7 +303,14 @@ impl Inner {
for sample in samples {
entry.add(*sample);
}
})
});
let mut total_len = 0;
for dist_map in d.values() {
total_len += dist_map.len();
}
metrics::gauge!("relay.collector.distributions.size").set(recordable(total_len));
}
let d = self.distributions.read().unwrap().clone();
@@ -358,6 +369,7 @@ impl MemoryCollector {
) {
let mut d = self.inner.descriptions.write().unwrap();
d.entry(key.as_str().to_owned()).or_insert(description);
metrics::gauge!("relay.collector.descriptions.size").set(recordable(d.len()));
}
pub(crate) fn install(&self) -> Result<(), SetRecorderError<Self>> {

View file

@@ -9,10 +9,10 @@ pub(crate) struct LastOnline {
impl LastOnline {
pub(crate) fn mark_seen(&self, iri: &IriStr) {
if let Some(authority) = iri.authority_str() {
self.domains
.lock()
.unwrap()
.insert(authority.to_string(), OffsetDateTime::now_utc());
let mut guard = self.domains.lock().unwrap();
guard.insert(authority.to_string(), OffsetDateTime::now_utc());
metrics::gauge!("relay.last-online.size",)
.set(crate::collector::recordable(guard.len()));
}
}

View file

@@ -73,7 +73,9 @@ impl State {
}
pub(crate) fn cache(&self, object_id: IriString, actor_id: IriString) {
self.object_cache.write().unwrap().put(object_id, actor_id);
let mut guard = self.object_cache.write().unwrap();
guard.put(object_id, actor_id);
metrics::gauge!("relay.object-cache.size").set(crate::collector::recordable(guard.len()));
}
pub(crate) fn is_connected(&self, iri: &IriString) -> bool {

142
src/db.rs
View file

@@ -7,7 +7,7 @@ use rsa::{
pkcs8::{DecodePrivateKey, EncodePrivateKey},
RsaPrivateKey,
};
use sled::{Batch, Tree};
use sled::{transaction::TransactionError, Batch, Transactional, Tree};
use std::{
collections::{BTreeMap, HashMap},
sync::{
@@ -283,10 +283,15 @@ impl Db {
pub(crate) async fn check_health(&self) -> Result<(), Error> {
let next = self.inner.healthz_counter.fetch_add(1, Ordering::Relaxed);
self.unblock(move |inner| {
inner
let res = inner
.healthz
.insert("healthz", &next.to_be_bytes()[..])
.map_err(Error::from)
.map_err(Error::from);
metrics::gauge!("relay.db.healthz.size")
.set(crate::collector::recordable(inner.healthz.len()));
res
})
.await?;
self.inner.healthz.flush_async().await?;
@@ -349,6 +354,9 @@ impl Db {
.actor_id_info
.insert(actor_id.as_str().as_bytes(), vec)?;
metrics::gauge!("relay.db.actor-id-info.size")
.set(crate::collector::recordable(inner.actor_id_info.len()));
Ok(())
})
.await
@@ -383,6 +391,9 @@ impl Db {
.actor_id_instance
.insert(actor_id.as_str().as_bytes(), vec)?;
metrics::gauge!("relay.db.actor-id-instance.size")
.set(crate::collector::recordable(inner.actor_id_instance.len()));
Ok(())
})
.await
@@ -417,6 +428,9 @@ impl Db {
.actor_id_contact
.insert(actor_id.as_str().as_bytes(), vec)?;
metrics::gauge!("relay.db.actor-id-contact.size")
.set(crate::collector::recordable(inner.actor_id_contact.len()));
Ok(())
})
.await
@@ -447,6 +461,12 @@ impl Db {
inner
.media_url_media_id
.insert(url.as_str().as_bytes(), id.as_bytes())?;
metrics::gauge!("relay.db.media-id-media-url.size")
.set(crate::collector::recordable(inner.media_id_media_url.len()));
metrics::gauge!("relay.db.media-url-media-id.size")
.set(crate::collector::recordable(inner.media_url_media_id.len()));
Ok(())
})
.await
@@ -538,6 +558,14 @@ impl Db {
inner
.actor_id_actor
.insert(actor.id.as_str().as_bytes(), vec)?;
metrics::gauge!("relay.db.public-key-actor-id.size").set(crate::collector::recordable(
inner.public_key_id_actor_id.len(),
));
metrics::gauge!("relay.db.actor-id-actor.size").set(crate::collector::recordable(
inner.public_key_id_actor_id.len(),
));
Ok(())
})
.await
@@ -550,6 +578,10 @@ impl Db {
.connected_actor_ids
.remove(actor_id.as_str().as_bytes())?;
metrics::gauge!("relay.db.connected-actor-ids.size").set(crate::collector::recordable(
inner.connected_actor_ids.len(),
));
Ok(())
})
.await
@@ -562,6 +594,10 @@ impl Db {
.connected_actor_ids
.insert(actor_id.as_str().as_bytes(), actor_id.as_str().as_bytes())?;
metrics::gauge!("relay.db.connected-actor-ids.size").set(crate::collector::recordable(
inner.connected_actor_ids.len(),
));
Ok(())
})
.await
@@ -569,30 +605,64 @@ impl Db {
pub(crate) async fn add_blocks(&self, domains: Vec<String>) -> Result<(), Error> {
self.unblock(move |inner| {
for connected in inner.connected_by_domain(&domains) {
inner
.connected_actor_ids
.remove(connected.as_str().as_bytes())?;
}
let connected_by_domain = inner.connected_by_domain(&domains).collect::<Vec<_>>();
for authority in &domains {
inner
.blocked_domains
.insert(domain_key(authority), authority.as_bytes())?;
inner.allowed_domains.remove(domain_key(authority))?;
}
let res = (
&inner.connected_actor_ids,
&inner.blocked_domains,
&inner.allowed_domains,
)
.transaction(|(connected, blocked, allowed)| {
let mut connected_batch = Batch::default();
let mut blocked_batch = Batch::default();
let mut allowed_batch = Batch::default();
Ok(())
for connected in &connected_by_domain {
connected_batch.remove(connected.as_str().as_bytes());
}
for authority in &domains {
blocked_batch
.insert(domain_key(authority).as_bytes(), authority.as_bytes());
allowed_batch.remove(domain_key(authority).as_bytes());
}
connected.apply_batch(&connected_batch)?;
blocked.apply_batch(&blocked_batch)?;
allowed.apply_batch(&allowed_batch)?;
Ok(())
});
metrics::gauge!("relay.db.connected-actor-ids.size").set(crate::collector::recordable(
inner.connected_actor_ids.len(),
));
metrics::gauge!("relay.db.blocked-domains.size")
.set(crate::collector::recordable(inner.blocked_domains.len()));
metrics::gauge!("relay.db.allowed-domains.size")
.set(crate::collector::recordable(inner.allowed_domains.len()));
match res {
Ok(()) => Ok(()),
Err(TransactionError::Abort(e) | TransactionError::Storage(e)) => Err(e.into()),
}
})
.await
}
pub(crate) async fn remove_blocks(&self, domains: Vec<String>) -> Result<(), Error> {
self.unblock(move |inner| {
let mut blocked_batch = Batch::default();
for authority in &domains {
inner.blocked_domains.remove(domain_key(authority))?;
blocked_batch.remove(domain_key(authority).as_bytes());
}
inner.blocked_domains.apply_batch(blocked_batch)?;
metrics::gauge!("relay.db.blocked-domains.size")
.set(crate::collector::recordable(inner.blocked_domains.len()));
Ok(())
})
.await
@@ -600,12 +670,17 @@ impl Db {
pub(crate) async fn add_allows(&self, domains: Vec<String>) -> Result<(), Error> {
self.unblock(move |inner| {
let mut allowed_batch = Batch::default();
for authority in &domains {
inner
.allowed_domains
.insert(domain_key(authority), authority.as_bytes())?;
allowed_batch.insert(domain_key(authority).as_bytes(), authority.as_bytes());
}
inner.allowed_domains.apply_batch(allowed_batch)?;
metrics::gauge!("relay.db.allowed-domains.size")
.set(crate::collector::recordable(inner.allowed_domains.len()));
Ok(())
})
.await
@@ -614,17 +689,32 @@ impl Db {
pub(crate) async fn remove_allows(&self, domains: Vec<String>) -> Result<(), Error> {
self.unblock(move |inner| {
if inner.restricted_mode {
for connected in inner.connected_by_domain(&domains) {
inner
.connected_actor_ids
.remove(connected.as_str().as_bytes())?;
let connected_by_domain = inner.connected_by_domain(&domains).collect::<Vec<_>>();
let mut connected_batch = Batch::default();
for connected in &connected_by_domain {
connected_batch.remove(connected.as_str().as_bytes());
}
inner.connected_actor_ids.apply_batch(connected_batch)?;
metrics::gauge!("relay.db.connected-actor-ids.size").set(
crate::collector::recordable(inner.connected_actor_ids.len()),
);
}
let mut allowed_batch = Batch::default();
for authority in &domains {
inner.allowed_domains.remove(domain_key(authority))?;
allowed_batch.remove(domain_key(authority).as_bytes());
}
inner.allowed_domains.apply_batch(allowed_batch)?;
metrics::gauge!("relay.db.allowed-domains.size")
.set(crate::collector::recordable(inner.allowed_domains.len()));
Ok(())
})
.await
@@ -665,6 +755,10 @@ impl Db {
inner
.settings
.insert("private-key".as_bytes(), pem_pkcs8.as_bytes())?;
metrics::gauge!("relay.db.settings.size")
.set(crate::collector::recordable(inner.settings.len()));
Ok(())
})
.await

View file

@@ -40,7 +40,12 @@ fn debug_object(activity: &serde_json::Value) -> &serde_json::Value {
object
}
pub(crate) fn build_storage() -> MetricsStorage<Storage<TokioTimer>> {
MetricsStorage::wrap(Storage::new(TokioTimer))
}
pub(crate) fn create_workers(
storage: MetricsStorage<Storage<TokioTimer>>,
state: State,
actors: ActorCache,
media: MediaCache,
@@ -48,18 +53,15 @@
) -> std::io::Result<JobServer> {
let deliver_concurrency = config.deliver_concurrency();
let queue_handle = WorkerConfig::new(
MetricsStorage::wrap(Storage::new(TokioTimer)),
move |queue_handle| {
JobState::new(
state.clone(),
actors.clone(),
JobServer::new(queue_handle),
media.clone(),
config.clone(),
)
},
)
let queue_handle = WorkerConfig::new(storage, move |queue_handle| {
JobState::new(
state.clone(),
actors.clone(),
JobServer::new(queue_handle),
media.clone(),
config.clone(),
)
})
.register::<Deliver>()
.register::<DeliverMany>()
.register::<QueryNodeinfo>()

View file

@@ -321,10 +321,16 @@ async fn server_main(
let sign_spawner2 = sign_spawner.clone();
let verify_spawner2 = verify_spawner.clone();
let config2 = config.clone();
let job_store = jobs::build_storage();
let server = HttpServer::new(move || {
let job_server =
create_workers(state.clone(), actors.clone(), media.clone(), config.clone())
.expect("Failed to create job server");
let job_server = create_workers(
job_store.clone(),
state.clone(),
actors.clone(),
media.clone(),
config.clone(),
)
.expect("Failed to create job server");
let app = App::new()
.app_data(web::Data::new(db.clone()))

View file

@@ -80,7 +80,7 @@ where
fn call(&self, req: ServiceRequest) -> Self::Future {
let log_on_drop = LogOnDrop {
begin: Instant::now(),
path: req.path().to_string(),
path: format!("{:?}", req.match_pattern()),
method: req.method().to_string(),
arm: false,
};