Compare commits

main...v0.3.x (11 commits)

Author | SHA1 | Message | Date | Drone CI
asonix | f26795af5a | Bump version | 2023-05-08 11:34:16 -05:00 | push: passing, tag: passing
asonix | 32786901b7 | Fix nix build | 2023-05-08 11:23:01 -05:00 | push: failing
asonix | a12b272a40 | Update opentelemetry features, remove 0.17 | 2023-05-08 11:13:33 -05:00 | push: passing
asonix | 15d0f3e2ba | Update opentelemetry (not to latest) | 2023-05-08 11:00:36 -05:00 | push: passing
asonix | 7ae9a3d993 | Clippy | 2023-05-08 10:50:11 -05:00 | push: passing
asonix | 240e057f19 | Update base64 | 2023-05-08 10:48:27 -05:00 |
asonix | 4dff8de985 | clippy | 2022-09-10 11:04:03 -05:00 | push: passing, tag: passing
asonix | 276eac29a6 | Bump version | 2022-09-10 11:02:10 -05:00 | push: killed
asonix | ea60fe7e1d | Update dependencies | 2022-09-10 11:01:28 -05:00 |
Aode (lion) | 885567b4e7 | Update env variable documentation with correct prefix | 2022-05-27 15:28:05 -05:00 | push: failing
Aode (lion) | b972e55587 | Add pnm module permission for imagemagick | 2022-04-03 16:43:14 -05:00 | tag: passing, push: passing
18 changed files with 1051 additions and 729 deletions

.gitignore (vendored, 3 lines changed)

@@ -3,3 +3,6 @@
 /docker/dev/volumes
 /client-examples/javascript/node_modules
 /docker/object-storage/storage
+/.envrc
+/.direnv
+/.ash_history

Cargo.lock (generated, 1512 lines changed)

File diff suppressed because it is too large.

Cargo.toml

@@ -1,7 +1,7 @@
 [package]
 name = "pict-rs"
 description = "A simple image hosting service"
-version = "0.3.0"
+version = "0.3.3"
 authors = ["asonix <asonix@asonix.dog>"]
 license = "AGPL-3.0"
 readme = "README.md"
@@ -27,7 +27,7 @@ actix-web = { version = "4.0.0", default-features = false }
 anyhow = "1.0"
 async-trait = "0.1.51"
 awc = { version = "3.0.0", default-features = false, features = ["rustls"] }
-base64 = "0.13.0"
+base64 = "0.21.0"
 config = "0.13.0"
 console-subscriber = "0.1"
 dashmap = "5.1.0"
@@ -35,14 +35,14 @@ futures-util = "0.3.17"
 mime = "0.3.1"
 num_cpus = "1.13"
 once_cell = "1.4.0"
-opentelemetry = { version = "0.17", features = ["rt-tokio"] }
-opentelemetry-otlp = "0.10"
+opentelemetry = { version = "0.18", features = ["rt-tokio"] }
+opentelemetry-otlp = "0.11"
 pin-project-lite = "0.2.7"
 reqwest = { version = "0.11.5", default-features = false, features = [
     "rustls-tls",
     "stream",
 ], optional = true }
-rust-s3 = { version = "0.29.0", default-features = false, features = [
+rust-s3 = { version = "0.31.0", default-features = false, features = [
     "fail-on-err",
     "with-reqwest",
 ], optional = true, git = "https://github.com/asonix/rust-s3", branch = "asonix/generic-client" }
@@ -55,27 +55,27 @@ structopt = "0.3.14"
 thiserror = "1.0"
 time = { version = "0.3.0", features = ["serde"] }
 tokio = { version = "1", features = ["full", "tracing"] }
-tokio-uring = { version = "0.3", optional = true, features = ["bytes"] }
+tokio-uring = { version = "0.4", optional = true, features = ["bytes"] }
 tokio-util = { version = "0.7", default-features = false, features = ["codec"] }
 tracing = "0.1.15"
 tracing-error = "0.2.0"
 tracing-futures = "0.2.4"
 tracing-log = "0.1.2"
-tracing-opentelemetry = "0.17"
+tracing-opentelemetry = "0.18"
 tracing-subscriber = { version = "0.3.0", features = [
     "env-filter",
     "fmt",
     "tracing-log",
 ] }
 url = { version = "2.2", features = ["serde"] }
-uuid = { version = "0.8.2", features = ["v4", "serde"] }
+uuid = { version = "1.1.2", features = ["v4", "serde"] }

 [dependencies.tracing-actix-web]
-version = "0.5.0"
+version = "0.7.0"
 default-features = false
-features = ["emit_event_on_error", "opentelemetry_0_17"]
+features = ["emit_event_on_error", "opentelemetry_0_18"]

 [dependencies.tracing-awc]
-version = "0.1.0"
+version = "0.1.6"
 default-features = false
-features = ["emit_event_on_error", "opentelemetry_0_17"]
+features = ["emit_event_on_error", "opentelemetry_0_18"]

README.md

@@ -9,7 +9,7 @@ _a simple image hosting service_
 ## Usage
 ### Running
 ```
-pict-rs 0.3.0
+pict-rs 0.3.1

 USAGE:
     pict-rs [FLAGS] [OPTIONS] [SUBCOMMAND]
@@ -56,7 +56,7 @@ SUBCOMMANDS:
 ```

 ```
-pict-rs-file-store 0.3.0
+pict-rs-file-store 0.3.1

 USAGE:
     pict-rs file-store [OPTIONS]
@@ -70,7 +70,7 @@ OPTIONS:
 ```

 ```
-pict-rs-s3-store 0.3.0
+pict-rs-s3-store 0.3.1

 USAGE:
     pict-rs s3-store [OPTIONS] --bucket-name <bucket-name> --region <region>
@@ -239,7 +239,7 @@ pict-rs offers the following endpoints:
 The following endpoints are protected by an API key via the `X-Api-Token` header, and are disabled
-unless the `--api-key` option is passed to the binary or the PICTRS_API_KEY environment variable is
+unless the `--api-key` option is passed to the binary or the PICTRS__API_KEY environment variable is
 set.

 A secure API key can be generated by any password generator.

policy.xml

@@ -15,7 +15,7 @@
   <policy domain="delegate" rights="execute" pattern="ffmpeg" />
   <policy domain="filter" rights="none" pattern="*" />
   <policy domain="module" rights="none" pattern="*" />
-  <policy domain="module" rights="read | write" pattern="{GIF,JPEG,PNG,WEBP,TMP,PAM,VIDEO}" />
+  <policy domain="module" rights="read | write" pattern="{GIF,JPEG,PNG,WEBP,TMP,PAM,PNM,VIDEO}" />
   <!-- indirect reads not permitted -->
   <policy domain="system" name="precision" value="6" />
 </policymap>

policy.xml

@@ -15,7 +15,7 @@
   <policy domain="delegate" rights="execute" pattern="ffmpeg" />
   <policy domain="filter" rights="none" pattern="*" />
   <policy domain="module" rights="none" pattern="*" />
-  <policy domain="module" rights="read | write" pattern="{GIF,JPEG,PNG,WEBP,TMP,PAM,VIDEO}" />
+  <policy domain="module" rights="read | write" pattern="{GIF,JPEG,PNG,WEBP,TMP,PAM,PNM,VIDEO}" />
   <!-- indirect reads not permitted -->
   <policy domain="system" name="precision" value="6" />
 </policymap>

flake.lock (new file, 61 lines)

@@ -0,0 +1,61 @@
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1681202837,
        "narHash": "sha256-H+Rh19JDwRtpVPAWp64F+rlEtxUWBAQW28eAi3SRSzg=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "cfacdce06f30d2b68473a46042957675eebb3401",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1683408522,
        "narHash": "sha256-9kcPh6Uxo17a3kK3XCHhcWiV1Yu1kYj22RHiymUhMkU=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "897876e4c484f1e8f92009fd11b7d988a121a4e7",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}

flake.nix (new file, 50 lines)

@@ -0,0 +1,50 @@
{
  description = "pict-rs";

  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    flake-utils.url = "github:numtide/flake-utils";
  };

  outputs = { self, nixpkgs, flake-utils }:
    flake-utils.lib.eachDefaultSystem (system:
      let
        pkgs = import nixpkgs {
          inherit system;
        };
      in
      {
        packages = rec {
          pict-rs = pkgs.callPackage ./pict-rs.nix {
            inherit (pkgs.darwin.apple_sdk.frameworks) Security;
          };

          default = pict-rs;
        };

        apps = rec {
          dev = flake-utils.lib.mkApp { drv = self.packages.${system}.pict-rs; };
          default = dev;
        };

        devShell = with pkgs; mkShell {
          nativeBuildInputs = [
            cargo
            cargo-outdated
            cargo-zigbuild
            clippy
            imagemagick
            ffmpeg_5-full
            gcc
            imagemagick
            protobuf
            rust-analyzer
            rustc
            rustfmt
            taplo
          ];

          RUST_SRC_PATH = "${pkgs.rust.packages.stable.rustPlatform.rustLibSrc}";
        };
      });
}

pict-rs.nix (new file, 42 lines)

@@ -0,0 +1,42 @@
{ exiftool
, ffmpeg_5-full
, imagemagick
, lib
, makeWrapper
, nixosTests
, protobuf
, rustPlatform
, Security
, stdenv
}:

rustPlatform.buildRustPackage {
  pname = "pict-rs";
  version = "0.3.3";

  src = ./.;

  cargoLock = {
    lockFile = ./Cargo.lock;
    outputHashes = {
      "aws-creds-0.29.1" = "bwDFmDPThMLrpaB7cAj/2/vJKhbX6/DqgcIRBVKSZhg=";
    };
  };

  PROTOC = "${protobuf}/bin/protoc";
  PROTOC_INCLUDE = "${protobuf}/include";

  nativeBuildInputs = [ makeWrapper ];
  buildInputs = lib.optionals stdenv.isDarwin [ Security ];

  postInstall = ''
    wrapProgram $out/bin/pict-rs \
      --prefix PATH : "${lib.makeBinPath [ imagemagick ffmpeg_5-full exiftool ]}"
  '';

  passthru.tests = { inherit (nixosTests) pict-rs; };

  meta = with lib; {
    description = "A simple image hosting service";
    homepage = "https://git.asonix.dog/asonix/pict-rs";
    license = with licenses; [ agpl3Plus ];
  };
}

pict-rs.toml

@@ -1,14 +1,14 @@
 ## Required: path to store pict-rs database
-# environment variable: PICTRS_PATH
+# environment variable: PICTRS__PATH
 path = './data'

 ## Optional: pict-rs binding address
-# environment variable: PICTRS_ADDR
+# environment variable: PICTRS__ADDR
 # default: 0.0.0.0:8080
 addr = '0.0.0.0:8080'

 ## Optional: format to transcode all uploaded images
-# environment variable: PICTRS_IMAGE_FORMAT
+# environment variable: PICTRS__IMAGE_FORMAT
 # valid options: 'jpeg', 'png', 'webp'
 # default: empty
 #
@@ -17,7 +17,7 @@ addr = '0.0.0.0:8080'
 image_format = 'jpeg'

 ## Optional: permitted image processing filters
-# environment variable: PICTRS_FILTERS
+# environment variable: PICTRS__FILTERS
 # valid options: 'identity', 'thumbnail', 'resize', 'crop', 'blur'
 # default: empty
 #
@@ -25,33 +25,33 @@ image_format = 'jpeg'
 filters = ['identity', 'thumbnail', 'resize', 'crop', 'blur']

 ## Optional: image bounds
-# environment variable: PICTRS_MAX_FILE_SIZE
+# environment variable: PICTRS__MAX_FILE_SIZE
 # default: 40
 max_file_size = 40 # in Megabytes

-# environment variable: PICTRS_MAX_IMAGE_WIDTH
+# environment variable: PICTRS__MAX_IMAGE_WIDTH
 # default: 10,000
 max_image_width = 10_000 # in Pixels

-# environment variable: PICTRS_MAX_IMAGE_HEIGHT
+# environment variable: PICTRS__MAX_IMAGE_HEIGHT
 # default: 10,000
 max_image_height = 10_000 # in Pixels

-# environment variable: PICTRS_MAX_IMAGE_AREA
+# environment variable: PICTRS__MAX_IMAGE_AREA
 # default: 40,000,000
 max_image_area = 40_000_000 # in Pixels

 ## Optional: skip image validation on the import endpoint
-# environment variable: PICTRS_SKIP_VALIDATE_IMPORTS
+# environment variable: PICTRS__SKIP_VALIDATE_IMPORTS
 # default: false
 skip_validate_imports = false

 ## Optional: set sled's cache capacity to a given number of bytes
-# environment variable: PICTRS_SLED_CACHE_CAPACITY
+# environment variable: PICTRS__SLED_CACHE_CAPACITY
 # default: 67_108_864 (1024 * 1024 * 64) e.g. 64MB
 #
 # Increasing this value can improve performance by keeping more of the database in RAM
 sled_cache_capacity = 67_108_864 # in bytes

 ## Optional: enable tokio-console and set the event buffer size
-# environment variable: PICTRS_CONSOLE_BUFFER_CAPACITY
+# environment variable: PICTRS__CONSOLE_BUFFER_CAPACITY
 # default: empty
 #
 # NOTE: this is the number of _events_ to buffer, not the number of bytes. In reality, the amount of
@@ -82,14 +82,14 @@ sled_cache_capacity = 67_108_864 # in bytes
 console_buffer_capacity = 102_400 # 102_400 (or 1024 * 100) is the default size used by console-subscriber

 ## Optional: shared secret for internal endpoints
-# environment variable: PICTRS_API_KEY
+# environment variable: PICTRS__API_KEY
 # default: empty
 #
 # Not specifying api_key disables internal endpoints
 api_key = 'API_KEY'

 ## Optional: url for exporting otlp traces
-# environment variable: PICTRS_OPENTELEMETRY_URL
+# environment variable: PICTRS__OPENTELEMETRY_URL
 # default: empty
 #
 # Not specifying opentelemetry_url means no traces will be exported
@@ -105,11 +105,11 @@ type = "file_store"
 ## Example file store
 # [store]
 #
-# # environment variable: PICTRS_STORE__TYPE
+# # environment variable: PICTRS__STORE__TYPE
 # type = 'file_store'
 #
 # # Optional: file path
-# # environment variable: PICTRS_STORE__PATH
+# # environment variable: PICTRS__STORE__PATH
 # # default: empty
 # #
 # # Not specifying path means pict-rs' top-level `path` config is used
@@ -118,35 +118,35 @@ type = "file_store"
 ## Example s3 store
 # [store]
 #
-# # environment variable: PICTRS_STORE__TYPE
+# # environment variable: PICTRS__STORE__TYPE
 # type = 's3_store'
 #
 # # Required: bucket name
-# # environment variable: PICTRS_STORE__BUCKET_NAME
+# # environment variable: PICTRS__STORE__BUCKET_NAME
 # bucket_name = 'rust_s3'
 #
 # # Required: bucket region
-# # environment variable: PICTRS_STORE__REGION
+# # environment variable: PICTRS__STORE__REGION
 # #
 # # can also be endpoint of local s3 store, e.g. 'http://minio:9000'
 # region = 'eu-central-1'
 #
 # # Optional: bucket access key
-# # environment variable: PICTRS_STORE__ACCESS_KEY
+# # environment variable: PICTRS__STORE__ACCESS_KEY
 # # default: empty
 # access_key = 'ACCESS_KEY'
 #
 # # Optional: bucket secret key
-# # environment variable: PICTRS_STORE__SECRET_KEY
+# # environment variable: PICTRS__STORE__SECRET_KEY
 # # default: empty
 # secret_key = 'SECRET_KEY'
 #
 # # Optional: bucket security token
-# # environment variable: PICTRS_STORE__SECURITY_TOKEN
+# # environment variable: PICTRS__STORE__SECURITY_TOKEN
 # # default: empty
 # security_token = 'SECURITY_TOKEN'
 #
 # # Optional: bucket session token
-# # environment variable: PICTRS_STORE__SESSION_TOKEN
+# # environment variable: PICTRS__STORE__SESSION_TOKEN
 # # default: empty
 # session_token = 'SESSION_TOKEN'
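
The rename from `PICTRS_*` to `PICTRS__*` in this file reflects how a double-underscore separator maps environment variables onto config keys. A hedged sketch of that mapping with the `config` crate, assuming the separator also applies after the prefix (which is how the documented names read); this illustrates the convention, not pict-rs's actual loader:

```rust
use config::{Config, Environment};

fn main() -> Result<(), config::ConfigError> {
    // Illustration only: with a "__" separator, PICTRS__API_KEY maps to the
    // top-level key `api_key`, and PICTRS__STORE__TYPE to the nested `store.type`.
    std::env::set_var("PICTRS__API_KEY", "example-key");
    std::env::set_var("PICTRS__STORE__TYPE", "file_store");

    let settings = Config::builder()
        .add_source(Environment::with_prefix("PICTRS").separator("__"))
        .build()?;

    assert_eq!(settings.get::<String>("api_key")?, "example-key");
    assert_eq!(settings.get::<String>("store.type")?, "file_store");
    Ok(())
}
```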


@@ -114,7 +114,7 @@ impl Drop for CancelToken {
     fn drop(&mut self) {
         if self.receiver.is_none() {
             let completed = PROCESS_MAP.remove(&self.path).is_none();
-            self.span.record("completed", &completed);
+            self.span.record("completed", completed);
         }
     }
 }
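
This change (and the matching ones in the tracing middleware hunks further down) tracks the newer `tracing` API, where `Span::record` takes its value by value rather than by reference. A minimal standalone sketch of the new call shape, assuming a recent `tracing` 0.1 release; the span name and field are illustrative, not pict-rs code:

```rust
use tracing::{field::Empty, info_span};

fn main() {
    // Declare the field up front with Empty so it can be filled in later.
    let span = info_span!("process", completed = Empty);

    let completed = true;
    // Older tracing accepted `&completed`; newer releases take `impl Value`
    // by value, which is what the diff switches to.
    span.record("completed", completed);
}
```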


@@ -180,7 +180,7 @@ where
     parse_details(s)
 }

-pub(crate) async fn details_file(path_str: &str) -> Result<Details, Error> {
+async fn details_file(path_str: &str) -> Result<Details, Error> {
     let process = Process::run(
         "magick",
         &["identify", "-ping", "-format", "%w %h | %m\n", path_str],


@@ -46,7 +46,7 @@ impl SledDb for sled034::Db {
     }

     fn self_tree(&self) -> &Self::SledTree {
-        &*self
+        self
     }
 }


@@ -212,8 +212,8 @@ where
             if let Ok(err) = res {
                 let display = format!("{}", err);
                 let debug = format!("{:?}", err);
-                span.record("exception.message", &display.as_str());
-                span.record("exception.details", &debug.as_str());
+                span.record("exception.message", display.as_str());
+                span.record("exception.details", debug.as_str());
                 return Poll::Ready(Err(err));
             }
         }
@@ -223,8 +223,8 @@ where
             if let Err(err) = &res {
                 let display = format!("{}", err);
                 let debug = format!("{:?}", err);
-                span.record("exception.message", &display.as_str());
-                span.record("exception.details", &debug.as_str());
+                span.record("exception.message", display.as_str());
+                span.record("exception.details", debug.as_str());
             }
             return Poll::Ready(res);
         }


@@ -17,7 +17,7 @@ pub(crate) fn chop_bytes(
     if let Some((start, end)) = byte_range.to_satisfiable_range(length) {
         // END IS INCLUSIVE
         let end = end as usize + 1;
-        return Ok(once(ready(Ok(bytes.slice(start as usize..end as usize)))));
+        return Ok(once(ready(Ok(bytes.slice(start as usize..end)))));
     }

     Err(UploadError::Range.into())
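
For context on the `+ 1` and the dropped cast: HTTP range ends are inclusive while Rust slice ranges are exclusive, and once `end` is already a `usize` the second `as usize` is redundant. A small illustration with made-up values, not pict-rs code:

```rust
use bytes::Bytes;

fn main() {
    let bytes = Bytes::from_static(b"hello world");

    // An HTTP `Range: bytes=6-10` request has an inclusive end index...
    let (start, end_inclusive): (u64, u64) = (6, 10);
    // ...while `Bytes::slice` takes an exclusive end, hence the `+ 1`.
    let end = end_inclusive as usize + 1;

    let chunk = bytes.slice(start as usize..end);
    assert_eq!(&chunk[..], b"world");
}
```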


@@ -2,7 +2,8 @@ use crate::store::Store;
 use actix_web::web::Bytes;
 use futures_util::stream::Stream;
 use s3::{
-    client::Client, command::Command, creds::Credentials, request_trait::Request, Bucket, Region,
+    client::Client, command::Command, creds::Credentials, error::S3Error, request_trait::Request,
+    Bucket, Region,
 };
 use std::{
     pin::Pin,
@@ -31,6 +32,9 @@ pub(crate) enum ObjectError {
     #[error(transparent)]
     Utf8(#[from] FromUtf8Error),

+    #[error(transparent)]
+    Upstream(#[from] S3Error),
+
     #[error("Invalid length")]
     Length,
@@ -171,7 +175,7 @@ impl ObjectStore {
         Ok(ObjectStore {
             path_gen,
             settings_tree,
-            bucket: Bucket::new_with_path_style(
+            bucket: Bucket::new(
                 bucket_name,
                 match region {
                     Region::Custom { endpoint, .. } => Region::Custom {
@@ -186,7 +190,8 @@ impl ObjectStore {
                 security_token,
                 session_token,
             },
-            )?,
+            )?
+            .with_path_style(),
             client,
         })
     }


@@ -1,5 +1,6 @@
 use crate::error::Error;
 use actix_web::web;
+use base64::engine::{Engine, general_purpose::STANDARD};
 use sha2::{digest::FixedOutputReset, Digest};
 use std::{
     pin::Pin,
@@ -79,7 +80,7 @@ where
 impl std::fmt::Debug for Hash {
     fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
-        write!(f, "{}", base64::encode(&self.inner))
+        write!(f, "{}", STANDARD.encode(&self.inner))
     }
 }
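
This hasher diff is the code side of the `base64` 0.13 to 0.21 bump from Cargo.toml: the free `base64::encode` function is gone and encoding goes through an explicit engine. A standalone sketch of the new API with illustrative data, not pict-rs code:

```rust
use base64::engine::{general_purpose::STANDARD, Engine};

fn main() {
    let digest = [0xde_u8, 0xad, 0xbe, 0xef];

    // base64 0.13: base64::encode(&digest)
    // base64 0.21: pick an engine explicitly, then encode through it.
    let encoded = STANDARD.encode(digest);
    assert_eq!(encoded, "3q2+7w==");
    println!("{encoded}");
}
```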


@@ -89,7 +89,7 @@ where
             debug!("Remove hash/id -> alias mapping");
             let id = String::from_utf8_lossy(&id);
             let key = alias_key(&hash, &id);
-            let _ = manager.inner.main_tree.remove(&key);
+            let _ = manager.inner.main_tree.remove(key);
         }

         let _ = manager.check_delete_files(store, hash).await;