// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
this.app.use((req, res) => {
const report = FullTracesReport.decode(req.body);
this.reports.push(report);
res.end();
// Resolve any outstanding Promises with our new report data.
reportResolver(this.reports);
});
}
return;
}
// Send traces asynchronously, so that (eg) addTrace inside a resolver
// doesn't block on it.
await Promise.resolve();
if (this.options.debugPrintReports) {
console.log(`Engine sending report: ${JSON.stringify(report.toJSON())}`);
}
const protobufError = FullTracesReport.verify(report);
if (protobufError) {
throw new Error(`Error encoding report: ${protobufError}`);
}
const message = FullTracesReport.encode(report).finish();
const compressed = await new Promise((resolve, reject) => {
// The protobuf library gives us a Uint8Array. Node 8's zlib lets us
// pass it directly; convert for the sake of Node 6. (No support right
// now for Node 4, which lacks Buffer.from.)
const messageBuffer = Buffer.from(
message.buffer as ArrayBuffer,
message.byteOffset,
message.byteLength,
);
gzip(messageBuffer, (err, gzipResult) => {
if (err) {
reject(err);
} else {
resolve(gzipResult);
}
const report = this.reports[schemaHash];
this.resetReport(schemaHash);
if (Object.keys(report.tracesPerQuery).length === 0) {
return;
}
// Send traces asynchronously, so that (eg) addTrace inside a resolver
// doesn't block on it.
await Promise.resolve();
if (this.options.debugPrintReports) {
console.log(`Engine sending report: ${JSON.stringify(report.toJSON())}`);
}
const protobufError = FullTracesReport.verify(report);
if (protobufError) {
throw new Error(`Error encoding report: ${protobufError}`);
}
const message = FullTracesReport.encode(report).finish();
const compressed = await new Promise((resolve, reject) => {
// The protobuf library gives us a Uint8Array. Node 8's zlib lets us
// pass it directly; convert for the sake of Node 6. (No support right
// now for Node 4, which lacks Buffer.from.)
const messageBuffer = Buffer.from(
message.buffer as ArrayBuffer,
message.byteOffset,
message.byteLength,
);
gzip(messageBuffer, (err, gzipResult) => {
if (err) {
let traceBuffer: Buffer | undefined;
let traceParsingFailed = false;
try {
// XXX support non-Node implementations by using Uint8Array? protobufjs
// supports that, but there's not a no-deps base64 implementation.
traceBuffer = Buffer.from(traceBase64, 'base64');
} catch (err) {
console.error(
`error decoding base64 for federated trace from ${fetch.serviceName}: ${err}`,
);
traceParsingFailed = true;
}
if (traceBuffer) {
try {
const trace = Trace.decode(traceBuffer);
traceNode.trace = trace;
} catch (err) {
console.error(
`error decoding protobuf for federated trace from ${fetch.serviceName}: ${err}`,
);
traceParsingFailed = true;
}
}
traceNode.traceParsingFailed = traceParsingFailed;
}
}
return response.data;
}
}
async function executeNode(
context: ExecutionContext,
node: PlanNode,
results: ResultMap | ResultMap[],
path: ResponsePath,
captureTraces: boolean,
): Promise {
if (!results) {
// XXX I don't understand `results` threading well enough to understand when this happens
// and if this corresponds to a real query plan node that should be reported or not.
//
// This may be if running something like `query { fooOrNullFromServiceA {
// somethingFromServiceB } }` and the first field is null, then we don't bother to run the
// inner field at all.
return new Trace.QueryPlanNode();
}
switch (node.kind) {
case 'Sequence': {
const traceNode = new Trace.QueryPlanNode.SequenceNode();
for (const childNode of node.nodes) {
const childTraceNode = await executeNode(
context,
childNode,
results,
path,
captureTraces,
);
traceNode.nodes.push(childTraceNode!);
}
return new Trace.QueryPlanNode({ sequence: traceNode });
const traceNode = new Trace.QueryPlanNode.FetchNode({
serviceName: node.serviceName,
// executeFetch will fill in the other fields if desired.
});
try {
await executeFetch(
context,
node,
results,
path,
captureTraces ? traceNode : null,
);
} catch (error) {
context.errors.push(error);
}
return new Trace.QueryPlanNode({ fetch: traceNode });
}
}
}
// NOTE(review): this function body appears truncated/spliced by chunk
// extraction — from the `else` branch below onward, the code references
// `this.reportHeaders`, `schemaHash`, and uses `await`, none of which can
// belong to this free-standing, non-async exported function. It looks like
// interior lines of `addTrace` were pasted in. Restore from the original
// source before relying on anything past the `catch` block.
export function makeTraceDetails(
// NOTE(review): `Record` lost its type arguments in extraction — presumably
// Record<string, any>; confirm against the original.
variables: Record,
sendVariableValues?: VariableValueOptions,
operationString?: string,
): Trace.Details {
const details = new Trace.Details();
const variablesToRecord = (() => {
if (sendVariableValues && 'transform' in sendVariableValues) {
const originalKeys = Object.keys(variables);
try {
// Custom function to allow user to specify what variablesJson will look like
const modifiedVariables = sendVariableValues.transform({
variables: variables,
operationString: operationString,
});
return cleanModifiedVariables(originalKeys, modifiedVariables);
} catch (e) {
// If the custom function provided by the user throws an exception,
// change all the variable values to an appropriate error message.
return handleVariableValueTransformError(originalKeys);
}
} else {
// NOTE(review): splice point — everything from here down is foreign code
// (matches the `addTrace` body at the end of this chunk) and is not part
// of makeTraceDetails. Left untouched pending reconstruction.
this.reportHeaders[schemaHash] = new ReportHeader({
...serviceHeaderDefaults,
schemaHash,
schemaTag:
this.options.schemaTag || process.env.ENGINE_SCHEMA_TAG || '',
});
// initializes this.reports[reportHash]
this.resetReport(schemaHash);
}
const report = this.reports[schemaHash];
const protobufError = Trace.verify(trace);
if (protobufError) {
throw new Error(`Error encoding trace: ${protobufError}`);
}
const encodedTrace = Trace.encode(trace).finish();
const signature = await this.getTraceSignature({
queryHash,
documentAST,
queryString,
operationName,
});
const statsReportKey = `# ${operationName || '-'}\n${signature}`;
if (!report.tracesPerQuery.hasOwnProperty(statsReportKey)) {
report.tracesPerQuery[statsReportKey] = new Traces();
(report.tracesPerQuery[statsReportKey] as any).encodedTraces = [];
}
// See comment on our override of Traces.encode inside of
// apollo-engine-reporting-protobuf to learn more about this strategy.
(report.tracesPerQuery[statsReportKey] as any).encodedTraces.push(
/**
 * Finish the in-progress trace and serialize it for transport.
 *
 * Returns a `['ftv1', <base64>]` pair — the extension key and the
 * base64-encoded, protobuf-serialized Trace — or `undefined` when this
 * extension is disabled.
 *
 * @throws if called more than once on the same instance.
 */
public format(): [string, string] | undefined {
  if (!this.enabled) {
    return;
  }
  if (this.done) {
    throw Error('format called twice?');
  }
  // We record the end time at the latest possible time: right before serializing the trace.
  // If we wait any longer, the time we record won't actually be sent anywhere!
  this.treeBuilder.stopTiming();
  this.done = true;
  const encodedUint8Array = Trace.encode(this.treeBuilder.trace).finish();
  // Wrap the protobuf bytes without copying. Passing the Uint8Array itself
  // (as the original did) hits the copying Buffer.from(array) overload and
  // silently ignores the byteOffset/byteLength arguments; use the underlying
  // ArrayBuffer, matching the pattern used elsewhere in this file.
  const encodedBuffer = Buffer.from(
    encodedUint8Array.buffer as ArrayBuffer,
    encodedUint8Array.byteOffset,
    encodedUint8Array.byteLength,
  );
  return ['ftv1', encodedBuffer.toString('base64')];
}
}
public async addTrace({
trace,
queryHash,
documentAST,
operationName,
queryString,
schemaHash,
}: AddTraceArgs): Promise {
// Ignore traces that come in after stop().
if (this.stopped) {
return;
}
if (!(schemaHash in this.reports)) {
this.reportHeaders[schemaHash] = new ReportHeader({
...serviceHeaderDefaults,
schemaHash,
schemaTag:
this.options.schemaTag || process.env.ENGINE_SCHEMA_TAG || '',
});
// initializes this.reports[reportHash]
this.resetReport(schemaHash);
}
const report = this.reports[schemaHash];
const protobufError = Trace.verify(trace);
if (protobufError) {
throw new Error(`Error encoding trace: ${protobufError}`);
}
const encodedTrace = Trace.encode(trace).finish();