Serialization works (step 6 untested, step 8.1 untested)

asonix 2022-12-14 23:35:53 -06:00
parent 57956e329d
commit e8f119847a
5 changed files with 1028 additions and 255 deletions

@@ -4,7 +4,7 @@ version = "0.1.0"
 edition = "2021"
 
 [[example]]
-name = "masto"
+name = "normalize_ap"
 required-features = ["rustcrypto-sha2"]
 
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

@@ -7,6 +7,7 @@ edition = "2021"
 [dependencies]
 contextual = "0.1.3"
+indexmap = "1.9.2"
 iref = "2.2.0"
 json-ld = "0.9.1"
 json-ld-syntax = "0.9.1"
@@ -16,3 +17,10 @@ locspan = "0.7.9"
 rdf-types = "0.12.4"
 smallvec = "1.10.0"
 static-iref = "2.0.0"
+
+[dev-dependencies]
+json-ld = { version = "0.9.1", features = ["reqwest"] }
+json-ld-normalization = { path = "../normalization" }
+reqwest = "0.11.13"
+sha2 = "0.10"
+tokio = { version = "1", features = ["full"] }

@@ -0,0 +1,199 @@
use iref::Iri;
use json_ld::{
syntax::Parse, Compact, ExpandedDocument, JsonLdProcessor, Print, Process, RemoteDocument,
RemoteDocumentReference, ReqwestLoader, TryFromJson,
};
use json_ld_syntax::TryFromJson as _;
use locspan::{Location, Span};
use rdf_types::{generator::Blank, IriVocabularyMut};
use reqwest::Client;
use static_iref::iri;
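
// Two JSON-LD contexts to compact the normalized output against:
// SIMPLE_CONTEXT is just ActivityStreams plus the security vocabulary,
// while MASTODON_CONTEXT adds Mastodon's toot/schema term definitions.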
const SIMPLE_CONTEXT: &str = r#"[
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1"
]"#;
const MASTODON_CONTEXT: &str = r#"[
"https://www.w3.org/ns/activitystreams",
"https://w3id.org/security/v1",
{
"manuallyApprovesFollowers":"as:manuallyApprovesFollowers",
"toot":"http://joinmastodon.org/ns#",
"featured":{
"@id":"toot:featured",
"@type":"@id"
},
"featuredTags":{
"@id":"toot:featuredTags",
"@type":"@id"
},
"alsoKnownAs":{
"@id":"as:alsoKnownAs",
"@type":"@id"
},
"movedTo":{
"@id":"as:movedTo",
"@type":"@id"
},
"schema":"http://schema.org#",
"PropertyValue":"schema:PropertyValue",
"value":"schema:value",
"discoverable":"toot:discoverable",
"Device":"toot:Device",
"Ed25519Signature":"toot:Ed25519Signature",
"Ed25519Key":"toot:Ed25519Key",
"Curve25519Key":"toot:Curve25519Key",
"EncryptedMessage":"toot:EncryptedMessage",
"publicKeyBase64":"toot:publicKeyBase64",
"deviceId":"toot:deviceId",
"claim":{
"@type":"@id",
"@id":"toot:claim"
},
"fingerprintKey":{
"@type":"@id",
"@id":"toot:fingerprintKey"
},
"identityKey":{
"@type":"@id",
"@id":"toot:identityKey"
},
"devices":{
"@type":"@id",
"@id":"toot:devices"
},
"messageFranking":"toot:messageFranking",
"messageType":"toot:messageType",
"cipherText":"toot:cipherText",
"suspended":"toot:suspended",
"Hashtag": "as:Hashtag",
"focalPoint":{
"@container":"@list",
"@id":"toot:focalPoint"
}
}
]"#;
type AnyError = Box<dyn std::error::Error + Send + Sync>;
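
// Fetch a handful of live ActivityPub actor documents and run each one
// through the expand -> normalize -> serialize -> compact pipeline below.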
#[tokio::main]
async fn main() -> Result<(), AnyError> {
let client = Client::builder()
.user_agent("json-ld-playground")
.build()
.expect("Successful client");
let iris = [
iri!("https://relay.asonix.dog/actor"),
iri!("https://masto.asonix.dog/actor"),
iri!("https://masto.asonix.dog/users/asonix"),
iri!("https://masto.asonix.dog/users/kumu"),
iri!("https://yiff.life/users/6my"),
iri!("https://meow.social/users/6my"),
];
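
    // Request each document with the ActivityPub media type and hand the
    // raw JSON text to the normalization pipeline.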
for iri in iris {
let document = client
.get(iri.as_str())
.header("accept", "application/activity+json")
.send()
.await?
.text()
.await?;
normalize_document(iri, &document).await?;
}
Ok(())
}
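
// Expand the fetched document, canonicalize it with json-ld-normalization
// using SHA-256, serialize the resulting quads back to JSON-LD, and then
// compact that output against each of the contexts above.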
async fn normalize_document(iri: Iri<'static>, document: &str) -> Result<(), AnyError> {
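    // Register the document IRI in a fresh vocabulary and wrap the raw JSON
    // text as an already-fetched RemoteDocument tagged with its media type.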
let mut vocabulary: rdf_types::IndexVocabulary = rdf_types::IndexVocabulary::new();
let iri_index = vocabulary.insert(iri);
let input = RemoteDocument::new(
Some(iri_index.clone()),
Some("application/activity+json".parse()?),
json_ld_syntax::Value::parse_str(document, |span| Location::new(iri_index.clone(), span))
.expect("Failed to parse"),
);
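
    // Expand the document, using a reqwest-backed loader to resolve any
    // remote contexts it references.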
let mut loader = ReqwestLoader::default();
let mut expanded = input
.expand_with(&mut vocabulary, &mut loader)
.await
.expect("Failed to expand");
let mut pre_gen = Blank::new().with_metadata(Location::new(iri_index.clone(), Span::default()));
expanded.identify_all_with(&mut vocabulary, &mut pre_gen);
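
    // Canonicalize the expanded document into a deterministic dataset of quads.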
let output_document = json_ld_normalization::normalize::<_, _, _, sha2::Sha256>(
&mut vocabulary,
iri_index,
expanded.0,
true,
)?;
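
    // Serialize the canonical quads back into expanded JSON-LD and print it.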
let serialized = json_ld_serialization::rdf_to_json_ld(
output_document.into_quads(),
Location::new(iri_index.clone(), Span::default()),
true,
None,
true,
false,
&vocabulary,
)
.expect("Failed to normalize");
println!("{}", serialized.pretty_print());
let expanded = ExpandedDocument::try_from_json_in(&mut vocabulary, serialized)
.expect("Invalid expanded json");
for context in [MASTODON_CONTEXT, SIMPLE_CONTEXT] {
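        // Parse the inline context JSON and wrap it as an already-loaded
        // remote context document.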
let context = RemoteDocumentReference::Loaded(RemoteDocument::new(
Some(iri_index.clone()),
Some("application/ld+json".parse()?),
json_ld_syntax::context::Value::try_from_json(
json_ld_syntax::Value::parse_str(context, |span| {
Location::new(iri_index.clone(), span)
})
.expect("Failed to parse"),
)
.expect("Failed to parse context"),
))
.load_context_with(&mut vocabulary, &mut loader)
.await
.expect("Context is loaded")
.into_document();
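
        // Process the context, then compact the expanded document with JSON-LD 1.1
        // processing, ordered output, and array compaction enabled.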
let processed = context
.process(&mut vocabulary, &mut loader, None)
.await
.expect("Failed to process context");
let compacted = expanded
.compact_full(
&mut vocabulary,
processed.as_ref(),
&mut loader,
json_ld::compaction::Options {
processing_mode: json_ld::ProcessingMode::JsonLd1_1,
compact_to_relative: true,
compact_arrays: true,
ordered: true,
},
)
.await
.map_err(|_| ())
.expect("Failed to compact");
println!("output: {}", compacted.pretty_print());
}
Ok(())
}

File diff suppressed because it is too large