Merge pull request #1777 from snyk/feat/fix-with-version-provenance
Feat: v1 fix with version provenance for requirements.txt projects
lili2311 committed Mar 30, 2021
2 parents 17e3431 + b286418 commit 5ebd685
Showing 17 changed files with 586 additions and 255 deletions.
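
In practice, "version provenance" means tracking which requirements file actually declares each dependency, following -r includes from the entry file: upgrades are written to the file that declares the package, and only the remediation that cannot be applied that way is pinned in the entry requirements.txt. A minimal, self-contained sketch of the idea — the file layout, the SimpleRequirement type, and the declaringFile helper are illustrations, not code from this PR:

// Illustration only: which file "owns" a requirement, given a provenance map
// keyed by file name (the shape this PR's extractProvenance produces).
type SimpleRequirement = { name: string; version?: string };

const provenance: Record<string, SimpleRequirement[]> = {
  'requirements.txt': [{ name: 'click', version: '7.0' }], // entry file, includes base.txt via -r
  'base.txt': [{ name: 'django', version: '1.6.1' }],
};

function declaringFile(pkgName: string): string | undefined {
  return Object.keys(provenance).find((fileName) =>
    provenance[fileName].some((req) => req.name.toLowerCase() === pkgName.toLowerCase()),
  );
}

console.log(declaringFile('Django')); // -> 'base.txt': the upgrade is written there;
// anything that cannot be fixed as a direct upgrade is pinned in requirements.txt instead.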
10 changes: 10 additions & 0 deletions packages/snyk-fix/src/lib/errors/missing-file-name.ts
@@ -0,0 +1,10 @@
import { CustomError, ERROR_CODES } from './custom-error';

export class MissingFileNameError extends CustomError {
public constructor() {
super(
'Filename is missing from test result. Please contact support@snyk.io.',
ERROR_CODES.MissingFileName,
);
}
}
@@ -8,7 +8,7 @@ import {
import { Workspace } from '../../../../types';
import { containsRequireDirective } from './contains-require-directive';

interface PythonProvenance {
export interface PythonProvenance {
[fileName: string]: ParsedRequirements;
}

@@ -20,7 +20,8 @@ export async function extractProvenance(
fileName: string,
provenance: PythonProvenance = {},
): Promise<PythonProvenance> {
const requirementsTxt = await workspace.readFile(path.join(dir, fileName));
const requirementsFileName = path.join(dir, fileName);
const requirementsTxt = await workspace.readFile(requirementsFileName);
provenance = {
...provenance,
[fileName]: parseRequirementsFile(requirementsTxt),
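
In the handler below, this is consumed as extractProvenance(workspace, dir, base), producing a PythonProvenance map keyed by file name; the recursive handling of -r includes continues past the excerpt shown here. A rough usage sketch, assuming an in-memory Workspace stub with invented file contents:

import { extractProvenance } from './extract-version-provenance';

// Assumed in-memory stand-in for the Workspace type from ../../../../types;
// file contents are invented for the example.
const files: Record<string, string> = {
  'requirements.txt': '-r base.txt\nclick==7.0\n',
  'base.txt': 'django==1.6.1\n',
};
const workspace = {
  readFile: async (filePath: string) => files[filePath],
  writeFile: async () => undefined, // not exercised in this sketch
};

extractProvenance(workspace as any, '.', 'requirements.txt').then((provenance) => {
  // Expected keys: 'requirements.txt' and, via the -r directive, 'base.txt',
  // each mapped to its parsed requirements.
  console.log(Object.keys(provenance));
});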
226 changes: 193 additions & 33 deletions packages/snyk-fix/src/plugins/python/handlers/pip-requirements/index.ts
@@ -1,17 +1,26 @@
import * as debugLib from 'debug';
import * as pathLib from 'path';
const sortBy = require('lodash.sortby');
const groupBy = require('lodash.groupby');

import {
EntityToFix,
FixChangesSummary,
FixOptions,
WithFixChangesApplied,
RemediationChanges,
Workspace,
} from '../../../../types';
import { PluginFixResponse } from '../../../types';
import { updateDependencies } from './update-dependencies';
import { MissingRemediationDataError } from '../../../../lib/errors/missing-remediation-data';
import { MissingFileNameError } from '../../../../lib/errors/missing-file-name';
import { partitionByFixable } from './is-supported';
import { NoFixesCouldBeAppliedError } from '../../../../lib/errors/no-fixes-applied';
import { parseRequirementsFile } from './update-dependencies/requirements-file-parser';
import { extractProvenance } from './extract-version-provenance';
import {
ParsedRequirements,
parseRequirementsFile,
} from './update-dependencies/requirements-file-parser';

const debug = debugLib('snyk-fix:python:requirements.txt');

@@ -26,56 +35,207 @@ export async function pipRequirementsTxt(
skipped: [],
};

const { fixable, skipped } = await partitionByFixable(entities);
handlerResult.skipped.push(...skipped);
const { fixable, skipped: notFixable } = await partitionByFixable(entities);
handlerResult.skipped.push(...notFixable);

for (const entity of fixable) {
try {
const fixedEntity = await fixIndividualRequirementsTxt(entity, options);
handlerResult.succeeded.push(fixedEntity);
} catch (e) {
handlerResult.failed.push({ original: entity, error: e });
}
const ordered = sortByDirectory(fixable);
const fixedFilesCache: string[] = [];
for (const dir of Object.keys(ordered)) {
debug(`Fixing entities in directory ${dir}`);
const entitiesPerDirectory = ordered[dir].map((e) => e.entity);
const { failed, succeeded, skipped, fixedFiles } = await fixAll(
entitiesPerDirectory,
options,
fixedFilesCache,
);
fixedFilesCache.push(...fixedFiles);
handlerResult.succeeded.push(...succeeded);
handlerResult.failed.push(...failed);
handlerResult.skipped.push(...skipped);
}
return handlerResult;
}

// TODO: optionally verify the deps install
export async function fixIndividualRequirementsTxt(
export function getRequiredData(
entity: EntityToFix,
options: FixOptions,
): Promise<WithFixChangesApplied<EntityToFix>> {
const fileName = entity.scanResult.identity.targetFile;
const remediationData = entity.testResult.remediation;
if (!remediationData) {
): {
remediation: RemediationChanges;
targetFile: string;
workspace: Workspace;
} {
const { remediation } = entity.testResult;
if (!remediation) {
throw new MissingRemediationDataError();
}
if (!fileName) {
const { targetFile } = entity.scanResult.identity;
if (!targetFile) {
throw new MissingFileNameError();
}
const requirementsTxt = await entity.workspace.readFile(fileName);
const requirementsData = parseRequirementsFile(requirementsTxt);
const { workspace } = entity;
if (!workspace) {
throw new NoFixesCouldBeAppliedError();
}
return { targetFile, remediation, workspace };
}

// TODO: allow handlers per fix type (later also strategies or combine with strategies)
const { updatedManifest, changes } = updateDependencies(
requirementsData,
remediationData.pin,
async function fixAll(
entities: EntityToFix[],
options: FixOptions,
fixedCache: string[],
): Promise<PluginFixResponse & { fixedFiles: string[] }> {
const handlerResult: PluginFixResponse = {
succeeded: [],
failed: [],
skipped: [],
};
for (const entity of entities) {
const targetFile = entity.scanResult.identity.targetFile!;
try {
const { dir, base } = pathLib.parse(targetFile);
// parse & join again to support correct separator
if (fixedCache.includes(pathLib.join(dir, base))) {
handlerResult.succeeded.push({
original: entity,
changes: [{ success: true, userMessage: 'Previously fixed' }],
});
continue;
}
const { changes, fixedFiles } = await applyAllFixes(entity, options);
if (!changes.length) {
debug('Manifest has not changed!');
throw new NoFixesCouldBeAppliedError();
}
fixedCache.push(...fixedFiles);
handlerResult.succeeded.push({ original: entity, changes });
} catch (e) {
debug(`Failed to fix ${targetFile}.\nERROR: ${e}`);
handlerResult.failed.push({ original: entity, error: e });
}
}
return { ...handlerResult, fixedFiles: [] };
}
// TODO: optionally verify the deps install
export async function fixIndividualRequirementsTxt(
workspace: Workspace,
dir: string,
entryFileName: string,
fileName: string,
remediation: RemediationChanges,
parsedRequirements: ParsedRequirements,
options: FixOptions,
directUpgradesOnly: boolean,
): Promise<{ changes: FixChangesSummary[]; appliedRemediation: string[] }> {
const fullFilePath = pathLib.join(dir, fileName);
const { updatedManifest, changes, appliedRemediation } = updateDependencies(
parsedRequirements,
remediation.pin,
directUpgradesOnly,
pathLib.join(dir, entryFileName) !== fullFilePath ? fileName : undefined,
);

// TODO: do this with the changes now that we only return new
if (updatedManifest === requirementsTxt) {
debug('Manifest has not changed!');
throw new NoFixesCouldBeAppliedError();
if (!changes.length) {
return { changes, appliedRemediation };
}

if (!options.dryRun) {
debug('Writing changes to file');
await entity.workspace.writeFile(fileName, updatedManifest);
await workspace.writeFile(pathLib.join(dir, fileName), updatedManifest);
} else {
debug('Skipping writing changes to file in --dry-run mode');
}

return {
original: entity,
changes,
return { changes, appliedRemediation };
}

export async function applyAllFixes(
entity: EntityToFix,
options: FixOptions,
): Promise<{ changes: FixChangesSummary[]; fixedFiles: string[] }> {
const { remediation, targetFile: entryFileName, workspace } = getRequiredData(
entity,
);
const fixedFiles: string[] = [];
const { dir, base } = pathLib.parse(entryFileName);
const provenance = await extractProvenance(workspace, dir, base);
const upgradeChanges: FixChangesSummary[] = [];
const appliedUpgradeRemediation: string[] = [];
/* Apply all upgrades first across all files that are included */
for (const fileName of Object.keys(provenance)) {
const skipApplyingPins = true;
const { changes, appliedRemediation } = await fixIndividualRequirementsTxt(
workspace,
dir,
base,
fileName,
remediation,
provenance[fileName],
options,
skipApplyingPins,
);
appliedUpgradeRemediation.push(...appliedRemediation);
upgradeChanges.push(...changes);
fixedFiles.push(pathLib.join(dir, fileName));
}

/* Apply all left over remediation as pins in the entry targetFile */
const requirementsTxt = await workspace.readFile(entryFileName);

const toPin: RemediationChanges = filterOutAppliedUpgrades(
remediation,
appliedUpgradeRemediation,
);
const directUpgradesOnly = false;
const { changes: pinnedChanges } = await fixIndividualRequirementsTxt(
workspace,
dir,
base,
base,
toPin,
parseRequirementsFile(requirementsTxt),
options,
directUpgradesOnly,
);

return { changes: [...upgradeChanges, ...pinnedChanges], fixedFiles };
}

function filterOutAppliedUpgrades(
remediation: RemediationChanges,
appliedRemediation: string[],
): RemediationChanges {
const pinRemediation: RemediationChanges = {
...remediation,
pin: {}, // delete the pin remediation so we can collect un-applied remediation
};
const pins = remediation.pin;
const lowerCasedAppliedRemediation = appliedRemediation.map((i) =>
i.toLowerCase(),
);
for (const pkgAtVersion of Object.keys(pins)) {
if (!lowerCasedAppliedRemediation.includes(pkgAtVersion.toLowerCase())) {
pinRemediation.pin[pkgAtVersion] = pins[pkgAtVersion];
}
}
return pinRemediation;
}

function sortByDirectory(
entities: EntityToFix[],
): {
[dir: string]: Array<{
entity: EntityToFix;
dir: string;
base: string;
ext: string;
root: string;
name: string;
}>;
} {
const mapped = entities.map((e) => ({
entity: e,
...pathLib.parse(e.scanResult.identity.targetFile!),
}));

const sorted = sortBy(mapped, 'dir');
return groupBy(sorted, 'dir');
}
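
The per-directory pass above is driven by sortByDirectory: entities are grouped by the directory of their target file using lodash sortby/groupby, and fixedFilesCache is used to skip requirements files already written earlier in the same run (reported as 'Previously fixed'). A small standalone sketch of that grouping, with made-up target files:

import * as pathLib from 'path';
const sortBy = require('lodash.sortby');
const groupBy = require('lodash.groupby');

// Invented target files; real entities carry scanResult.identity.targetFile.
const targetFiles = ['app/requirements.txt', 'app/base.txt', 'lib/requirements.txt'];

const mapped = targetFiles.map((targetFile) => ({
  targetFile,
  ...pathLib.parse(targetFile),
}));
const grouped = groupBy(sortBy(mapped, 'dir'), 'dir');

console.log(Object.keys(grouped)); // -> ['app', 'lib']; each directory is processed together,
// sharing the fixed-files cache so a shared base.txt is only written once per run.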
@@ -45,11 +45,13 @@ export async function isSupported(
};
}

const { containsRequire } = await containsRequireDirective(requirementsTxt);
if (containsRequire) {
const { containsRequire, matches } = await containsRequireDirective(
requirementsTxt,
);
if (containsRequire && matches.some((m) => m.includes('c'))) {
return {
supported: false,
reason: `Requirements with ${chalk.bold('-r')} or ${chalk.bold(
reason: `Requirements with ${chalk.bold(
'-c',
)} directive are not yet supported`,
};
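
With -r includes now handled through provenance, isSupported only rejects projects whose requirements use a -c constraints directive: containsRequireDirective returns the matched directives and the handler bails out when a match contains 'c'. The parser itself is outside this excerpt; a rough approximation of the check, with an assumed line format, would be:

// Assumed approximation of containsRequireDirective; the real implementation is not shown here.
function containsRequireDirectiveApprox(requirementsTxt: string) {
  const matches = requirementsTxt
    .split('\n')
    .map((line) => /^\s*-(r|c)\s+/.exec(line)?.[1])
    .filter((m): m is string => Boolean(m));
  return { containsRequire: matches.length > 0, matches };
}

const { containsRequire, matches } = containsRequireDirectiveApprox(
  '-c constraints.txt\ndjango==1.6.1\n',
);
console.log(containsRequire && matches.some((m) => m.includes('c'))); // true -> not yet supported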
@@ -6,7 +6,11 @@ import { Requirement } from './requirements-file-parser';
export function generatePins(
requirements: Requirement[],
updates: DependencyPins,
): { pinnedRequirements: string[]; changes: FixChangesSummary[] } {
): {
pinnedRequirements: string[];
changes: FixChangesSummary[];
appliedRemediation: string[];
} {
// Lowercase the upgrades object. This might be overly defensive, given that
// we control this input internally, but it's a low cost guard rail. Outputs a
// mapping of upgrade to -> from, instead of the nested upgradeTo object.
@@ -20,8 +24,10 @@ export function generatePins(
return {
pinnedRequirements: [],
changes: [],
appliedRemediation: [],
};
}
const appliedRemediation: string[] = [];
const changes: FixChangesSummary[] = [];
const pinnedRequirements = Object.keys(lowerCasedPins)
.map((pkgNameAtVersion) => {
@@ -32,12 +38,14 @@
success: true,
userMessage: `Pinned ${pkgName} from ${version} to ${newVersion}`,
});
appliedRemediation.push(pkgNameAtVersion);
return `${newRequirement} # not directly required, pinned by Snyk to avoid a vulnerability`;
})
.filter(isDefined);

return {
pinnedRequirements,
changes,
appliedRemediation,
};
}
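
generatePins now also returns the pkg@version remediation keys it applied, so filterOutAppliedUpgrades in the handler can drop them, case-insensitively, from the pin set before the entry file is pinned. A small standalone illustration of that filtering step, with invented keys:

// Invented remediation keys; mirrors the lowercase comparison in filterOutAppliedUpgrades.
const pinKeys = ['Django@1.6.1', 'transitive@1.0.0'];
const appliedRemediation = ['django@1.6.1']; // applied as a direct upgrade in an included file

const applied = appliedRemediation.map((i) => i.toLowerCase());
const remainingPins = pinKeys.filter(
  (pkgAtVersion) => !applied.includes(pkgAtVersion.toLowerCase()),
);

console.log(remainingPins); // -> ['transitive@1.0.0'], pinned in the entry requirements.txt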
