It compiles and runs and produces blank nodes, but I'm not yet sure whether it is correct

asonix 2022-12-11 11:43:26 -06:00
parent a1f40b3f67
commit 980fd91acb
3 changed files with 35 additions and 38 deletions

View file

@@ -2,10 +2,10 @@ use contextual::WithContext;
 use iref::{Iri, IriBuf};
 use json_ld::{
     syntax::{parse::MetaError, Parse, Value},
-    JsonLdProcessor, Loader, RemoteDocument,
+    Flatten, JsonLdProcessor, Loader, RemoteDocument,
 };
-use locspan::{Location, Meta};
-use rdf_types::{vocabulary::Index, IriVocabulary, IriVocabularyMut};
+use locspan::{Location, Meta, Span};
+use rdf_types::{generator::Blank, vocabulary::Index, IriVocabulary, IriVocabularyMut};
 use reqwest::Client;
 use static_iref::iri;
 use std::{
@@ -198,9 +198,8 @@ async fn do_the_thing(
     let input = RemoteDocument::new(
         Some(iri_index.clone()),
-        Some("application/activity+json".parse().expect("Invalid mime")),
-        Value::parse_str(document, |span| Location::new(iri_index, span))
-            .expect("Unable to parse actor"),
+        Some("application/activity+json".parse()?),
+        Value::parse_str(document, |span| Location::new(iri_index, span)).expect("Failed to parse"),
     );
 
     let mut loader = ReqwestLoader::default_with_cache(cache);
@@ -208,15 +207,20 @@
     let expanded = input
         .expand_with(&mut vocabulary, &mut loader)
         .await
-        .expect("Expansion failed");
+        .expect("Failed to expand");
 
-    let output_document = json_ld_normalization::normalize::<_, _, sha2::Sha256>(
+    let mut pre_gen = Blank::new().with_metadata(Location::new(iri_index, Span::default()));
+    let flattened = expanded
+        .flatten_with(&mut vocabulary, &mut pre_gen, true)
+        .expect("Failed to flatten");
+
+    let output_document = json_ld_normalization::normalize::<_, _, _, sha2::Sha256>(
         &mut vocabulary,
         iri_index,
-        expanded,
+        flattened.0,
         true,
-    )
-    .expect("Document is not time-complex");
+    )?;
 
     for quad in output_document.quads {
         let (subject, predicate, object, graph) = quad.into_parts();
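Read together, the hunks above replace the expand-then-normalize call site with an expand, flatten, then normalize pipeline. A minimal consolidated sketch of the new flow, assuming the surrounding `vocabulary`, `loader`, and `iri_index` bindings from this file and an enclosing function that can propagate errors with `?`:

    // Expand the remote document as before.
    let expanded = input
        .expand_with(&mut vocabulary, &mut loader)
        .await
        .expect("Failed to expand");

    // Flattening labels anonymous nodes, so it needs a blank node generator;
    // the Location metadata ties generated nodes back to the source document.
    let mut pre_gen = Blank::new().with_metadata(Location::new(iri_index, Span::default()));
    let flattened = expanded
        .flatten_with(&mut vocabulary, &mut pre_gen, true)
        .expect("Failed to flatten");

    // normalize now accepts any RdfQuads source, hence the extra `_` in the
    // turbofish; `flattened.0` unwraps the Meta wrapper around the document.
    let output_document = json_ld_normalization::normalize::<_, _, _, sha2::Sha256>(
        &mut vocabulary,
        iri_index,
        flattened.0,
        true,
    )?;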

View file

@@ -1,4 +1,3 @@
-use crate::Expanded;
 use json_ld::{RdfQuads, ValidId as Subject};
 use locspan::Location;
 use rdf_types::{
@@ -31,18 +30,19 @@ impl<N> InputDataset<N>
 where
     N: Vocabulary,
 {
-    pub(crate) fn from_expanded<D>(
-        expanded: Expanded<N, D>,
+    pub(crate) fn from_rdf_quads<R, D>(
+        rdf_quads: R,
         vocabulary: &mut N,
         generator: &mut BlankNodeGenerator<D>,
     ) -> InputDataset<N>
     where
+        R: RdfQuads<N::Iri, N::BlankId, Location<D>>,
         D: Clone,
         N: VocabularyMut,
         N::Iri: Clone + Eq + Hash + Send + Sync,
         N::BlankId: Clone + Eq + Hash + Send + Sync,
     {
-        let quads = expanded
+        let quads = rdf_quads
             .rdf_quads_with(vocabulary, generator, None)
             .map(|quad| {
                 let (subject, predicate, object, graph) = quad.into_parts();
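With `from_expanded` replaced by `from_rdf_quads`, the constructor no longer depends on the `Expanded` alias; anything implementing `RdfQuads` can supply the quads. A hypothetical call under that bound, reusing names like `flattened` and `pre_gen` from the first file:

    // Both an expanded document and a flattened document can satisfy the
    // RdfQuads bound, so either can seed an InputDataset now.
    let input_dataset = InputDataset::from_rdf_quads(flattened, &mut vocabulary, &mut pre_gen);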

View file

@@ -1,12 +1,9 @@
 use contextual::WithContext;
 use indexmap::IndexMap;
 use itertools::Itertools;
-use json_ld::{rdf::Value, ExpandedDocument, ValidId as Subject};
-use locspan::{Location, Meta, Span};
-use rdf_types::{
-    generator::Blank, BlankIdVocabulary, BlankIdVocabularyMut, IriVocabulary, Vocabulary,
-    VocabularyMut,
-};
+use json_ld::{rdf::Value, RdfQuads, ValidId as Subject};
+use locspan::{Location, Span};
+use rdf_types::{generator::Blank, BlankIdVocabularyMut, Vocabulary, VocabularyMut};
 use std::{
     borrow::Cow,
     collections::{BTreeMap, HashMap, HashSet},
@@ -46,11 +43,6 @@ pub trait Sha256 {
     fn finalize_hex_and_reset(&mut self) -> HexHash;
 }
 
-type Expanded<N, D> = Meta<
-    ExpandedDocument<<N as IriVocabulary>::Iri, <N as BlankIdVocabulary>::BlankId, Location<D>>,
-    Location<D>,
->;
-
 pub struct OutputDataset<N>
 where
     N: Vocabulary,
@@ -75,13 +67,14 @@ where
     hash_to_blank_nodes: BTreeMap<HexHash, HashSet<N::BlankId>>,
 }
 
-pub fn normalize<N, D, S>(
+pub fn normalize<N, D, R, S>(
     vocabulary: &mut N,
     document_id: D,
-    expanded: Expanded<N, D>,
+    rdf_quads: R,
     bail_on_large_inputs: bool,
 ) -> Result<OutputDataset<N>, Security>
 where
+    R: RdfQuads<N::Iri, N::BlankId, Location<D>>,
     D: Clone,
     S: Sha256 + Default,
     N: Vocabulary + VocabularyMut + Default,
@@ -90,7 +83,7 @@ where
 {
     CanonicalizationState::<N, S>::new(vocabulary).normalize(
         document_id,
-        expanded,
+        rdf_quads,
         bail_on_large_inputs,
     )
 }
@@ -114,22 +107,21 @@ where
         }
     }
 
-    fn normalize<D>(
+    fn normalize<D, R>(
         mut self,
         document_id: D,
-        expanded: Expanded<N, D>,
+        rdf_quads: R,
         bail_on_large_inputs: bool,
     ) -> Result<OutputDataset<N>, Security>
     where
+        R: RdfQuads<N::Iri, N::BlankId, Location<D>>,
         D: Clone,
         S: Sha256,
         N: VocabularyMut + Default,
         N::Iri: Clone + Eq + Hash + Send + Sync + std::fmt::Debug,
         N::BlankId: Clone + Eq + Hash + Send + Sync + std::fmt::Debug,
     {
-        let input_dataset = self.input_dataset(document_id, expanded);
-        println!("{:?}", input_dataset);
+        let input_dataset = self.input_dataset(document_id, rdf_quads);
 
         // Step 2
         self.find_blank_nodes(&input_dataset);
@@ -145,8 +137,9 @@ where
     }
 
     // (preparing input dataset is not a step, but we're coming from json ld types here)
-    fn input_dataset<D>(&mut self, document_id: D, expanded: Expanded<N, D>) -> InputDataset<N>
+    fn input_dataset<R, D>(&mut self, document_id: D, rdf_quads: R) -> InputDataset<N>
     where
+        R: RdfQuads<N::Iri, N::BlankId, Location<D>>,
         D: Clone,
         N: VocabularyMut,
         N::Iri: Clone + Eq + Hash + Send + Sync,
@@ -154,7 +147,7 @@ where
     {
         let mut pre_gen = Blank::new().with_metadata(Location::new(document_id, Span::default()));
 
-        InputDataset::from_expanded(expanded, &mut self.vocabulary, &mut pre_gen)
+        InputDataset::from_rdf_quads(rdf_quads, &mut self.vocabulary, &mut pre_gen)
     }
 
     // Step 2
@@ -282,7 +275,7 @@ where
         }
 
         // step 6.2.2
-        let mut temporary_issuer = make_issuer("_:b");
+        let mut temporary_issuer = make_issuer("b");
 
         // step 6.2.3
         let mut issued_identifier_list = Default::default();
@@ -781,7 +774,7 @@ where
 }
 
 fn canonicalization_node_generator() -> Blank {
-    make_issuer("_:c14n")
+    make_issuer("c14n")
 }
 
 fn make_issuer(prefix: &str) -> Blank {
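The prefix changes in the last two hunks ("_:b" to "b", "_:c14n" to "c14n") read as a fix for doubled prefixes: blank node identifiers in rdf_types already carry the leading `_:`, so a prefix that includes it would produce labels like `_:_:c14n0`. A sketch of the intended behavior, assuming `make_issuer` wraps `Blank::new_with_prefix` (the body is cut off above, so this is a guess at its shape):

    use rdf_types::generator::Blank;

    // Hypothetical body for make_issuer; the generator is expected to emit
    // identifiers of the form `_:{prefix}{counter}`, so callers pass just
    // "b" or "c14n" and let the generator supply the `_:` part.
    fn make_issuer(prefix: &str) -> Blank {
        Blank::new_with_prefix(prefix.to_string())
    }

    // Expected canonical labels (assumption): _:c14n0, _:c14n1, ...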