Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
function(err, data) {
if (err) throw err;
jsonld.compact(data, context, function(err, data) {
if (err) throw err;
fs.writeFile(
path.resolve(path.dirname(__dirname), 'src', 'schema_org.json'),
JSON.stringify(data, null, 2),
function(err) {
if (err) throw err;
// grab schema.org context
request.get(
{
url: 'http://schema.org',
headers: { Accept: 'application/ld+json' }
},
(err, resp, context) => {
if (err) throw err;
if (resp.statusCode >= 400) {
async createProof(
{document, purpose, documentLoader, expansionMap, compactProof}) {
// build proof (currently known as `signature options` in spec)
let proof;
if(this.proof) {
// use proof JSON-LD document passed to API
proof = await jsonld.compact(
this.proof, constants.SECURITY_CONTEXT_URL,
{documentLoader, expansionMap, compactToRelative: false});
} else {
// create proof JSON-LD document
proof = {'@context': constants.SECURITY_CONTEXT_URL};
}
// ensure proof type is set
proof.type = this.type;
// set default `now` date if not given in `proof` or `options`
let date = this.date;
if(proof.created === undefined && date === undefined) {
date = new Date();
}
let expandedProof;
if(suite.legacy) {
expandedProof = {
[constants.SECURITY_SIGNATURE_URL]: proof
};
} else {
expandedProof = {
[constants.SECURITY_PROOF_URL]: {'@graph': proof}
};
}
// account for type-scoped `proof` definition by getting document types
const {types, alias} = await _getTypeInfo(
{document, documentLoader, expansionMap});
expandedProof['@type'] = types;
const ctx = jsonld.getValues(document, '@context');
const compactProof = await jsonld.compact(
expandedProof, ctx,
{documentLoader, expansionMap, compactToRelative: false});
delete compactProof[alias];
delete compactProof['@context'];
// add proof to document
const key = Object.keys(compactProof)[0];
jsonld.addValue(document, key, compactProof[key]);
} else {
// in-place restore any existing proofs
/*if(existingProofs) {
document[proofProperty] = existingProofs;
}*/
// add new proof
delete proof['@context'];
jsonld.addValue(document, proofProperty, proof);
/**
 * Determine the `@type` values of `document` and the term that the
 * document's context aliases to `@type` (if any).
 *
 * @param {object} options - the options to use:
 *   document - the JSON-LD document to inspect.
 *   documentLoader - the document loader to use.
 *   expansionMap - the expansion map to use.
 * @returns {Promise<{types: Array, alias: string}>} the expanded
 *   `@type` values and the compact-form alias for `@type`.
 */
async function _getTypeInfo({document, documentLoader, expansionMap}) {
  const context = jsonld.getValues(document, '@context');

  // Compact a probe node to discover what `@type` compacts to under
  // the document's context (e.g. `type`).
  const probe = await jsonld.compact(
    {'@type': '_:b0'}, context, {documentLoader, expansionMap});
  delete probe['@context'];
  const [alias] = Object.keys(probe);

  // Optimization: expand only the `@type`/alias values, not the whole
  // document.
  const typeValues = [
    ...jsonld.getValues(document, '@type'),
    ...jsonld.getValues(document, alias)
  ];
  const expansionInput = {'@context': context, '@type': typeValues};
  const [expanded = {}] = await jsonld.expand(
    expansionInput, {documentLoader, expansionMap});

  return {types: jsonld.getValues(expanded, '@type'), alias};
}
async function _getProofs({
document, legacy, documentLoader, expansionMap, compactProof}) {
// handle document preprocessing to find proofs
const proofProperty = legacy ? 'signature' : 'proof';
let proofSet;
if(compactProof) {
// if we must compact the proof(s) then we must first compact the input
// document to find the proof(s)
document = await jsonld.compact(
document, constants.SECURITY_CONTEXT_URL,
{documentLoader, expansionMap, compactToRelative: false});
}
proofSet = jsonld.getValues(document, proofProperty);
delete document[proofProperty];
if(proofSet.length === 0) {
// no possible matches
throw new Error('No matching proofs found in the given document.');
}
// TODO: consider in-place editing to optimize
// shallow copy proofs and add SECURITY_CONTEXT_URL
proofSet = proofSet.map(proof => ({
'@context': constants.SECURITY_CONTEXT_URL,
// Convert the N-Quads `output` to JSON-LD, compact it against `context`,
// and print the serialized result to stdout.
jsonld.fromRDF(output, {format: 'application/nquads'}, function(err, doc) {
  // Bug fix: errors were silently ignored, letting the pipeline continue
  // with undefined data; fail fast like the rest of the file does.
  if (err) throw err;
  jsonld.compact(doc, context, function(err, compacted) {
    if (err) throw err;
    var jsonresult = JSON.stringify(compacted);
    //ugly fix for https://github.com/iRail/stations/issues/8
    jsonresult = jsonresult.replace(/"alternative":({.*?})/gi,"\"alternative\":[$1]");
    console.log(jsonresult);
  });
});
}
async ({ value, splitEditorValue }) => {
const jsonLd = await compact(
JSON.parse(value),
JSON.parse(splitEditorValue)
);
return JSON.stringify(jsonLd, null, 2);
},
[]
// Frame the expanded node for `iri`, then compact the framed result
// against `context`, delivering the final document via `callback`.
jsonld.frame(expanded[iri], frame, function(err, framed) {
// NOTE(review): `expanded === null` guards the outer lookup, not the
// framing result — on that path `err` may be null, so the callback is
// invoked with (null, framed); confirm callers treat that as success.
if (err || expanded === null) {
return callback(err, framed);
}
jsonld.compact(framed, context, options, callback);
});
});
/**
 * Compact `doc` against `ctxUrl` and pass the result to `callback`.
 * If the document carries the canonical schema.org context URL, it is
 * temporarily swapped for `ctxUrl` before compaction and restored on
 * the compacted result (helps for testing against a local context).
 */
function _next(doc){
  var originalCtx;
  // help for testing
  if (doc['@context'] === SchemaOrgIo.contextUrl) {
    originalCtx = doc['@context'];
    doc['@context'] = ctxUrl;
  }
  jsonld.compact(doc, ctxUrl, function(err, compactedDoc){
    if (err) {
      return callback(err);
    }
    // Restore the original context URL on the compacted document.
    if (originalCtx && compactedDoc['@context'] === ctxUrl) {
      compactedDoc['@context'] = originalCtx;
    }
    callback(null, compactedDoc);
  });
};
// Convert one N3 triple into a JSON-LD value on the subject node `sub`.
var object;
// Register unseen URI/blank-node objects as JSON-LD resources.
// NOTE(review): `object` is unconditionally reassigned below, so this
// value is discarded — presumably jsonldResource() registers the node
// in `map` as a side effect; confirm, otherwise this branch is dead.
if ((N3.Util.isUri(triple.object) || N3.Util.isBlank(triple.object)) && !map[triple.object]) {
object = jsonldResource(triple.object);
}
// rdf:type triples become `@type` entries rather than regular properties.
if (triple.predicate === rdf.type) {
jsonld.addValue(sub, '@type', triple.object, { propertyIsArray: true });
return;
}
// All other predicates are added as array-valued properties.
object = jsonldObject(triple.object);
jsonld.addValue(sub, triple.predicate, object, { propertyIsArray: true });
});
// Compact the collected resources against `context` and hand the
// serialized JSON-LD to `callback` as (err, mediaType, content).
jsonld.compact(resources, context, function(err, json) {
  if (err) {
    // Bug fix: missing `return` — previously execution fell through and
    // invoked the callback a second time with undefined content.
    return callback(err);
  }
  var content = JSON.stringify(json, undefined, 4);
  callback(null, media.jsonld, content);
});
};