Build filetree with missing directories to support --only-findings scans #624

Merged 3 commits on Jan 22, 2024
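Note on the approach (not part of the PR text): an `--only-findings` scan reports only the files that carry findings, so their parent directories never appear in the scan JSON and the file tree cannot be built from the reported paths alone. The sketch below illustrates the idea behind the new `_imputeIntermediateDirectories` helper in `src/services/workbenchDB.ts`; the `parentPath` helper and the `Entry` type here are simplified stand-ins, not the project's actual code.

```ts
// Standalone illustration (assumed helper and types, not the PR's exact code):
// create the directory entries that an --only-findings scan leaves out.
type Entry = { path: string; parent: string; type: "file" | "directory" };

// Simplified stand-in for the project's parentPath helper:
// "a/b/c.txt" -> "a/b", "a" -> "" (top level).
const parentPath = (p: string): string =>
  p.includes("/") ? p.slice(0, p.lastIndexOf("/")) : "";

function imputeMissingDirectories(entries: Entry[]): Entry[] {
  const known = new Set(entries.map((e) => e.path));
  const added: Entry[] = [];
  for (const entry of entries) {
    // Walk up from each entry toward the root, adding every directory
    // that was not part of the scan output.
    for (
      let dir = parentPath(entry.path);
      dir !== "" && !known.has(dir);
      dir = parentPath(dir)
    ) {
      added.push({ path: dir, parent: parentPath(dir), type: "directory" });
      known.add(dir);
    }
  }
  return [...entries, ...added];
}

// Example: a scan that reports only two nested files gains directory entries
// for "pkg", "pkg/src", and "pkg/docs", so the file tree can still be built.
console.log(
  imputeMissingDirectories([
    { path: "pkg/src/main.js", parent: "pkg/src", type: "file" },
    { path: "pkg/docs/NOTICE", parent: "pkg/docs", type: "file" },
  ])
);
```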
1 change: 1 addition & 0 deletions src/App.tsx
@@ -29,6 +29,7 @@ import "react-toastify/dist/ReactToastify.css";
import "bootstrap/dist/css/bootstrap.min.css";
import "react-tooltip/dist/react-tooltip.css";
import "./styles/app.css";
import "./styles/toastify.css";
import "./styles/dashStyles.css";
import "./styles/customFaColors.css";

1 change: 1 addition & 0 deletions src/constants/errors.ts
@@ -0,0 +1 @@
export const NO_RESOURCES_ERROR = "No files found in the scan";
73 changes: 35 additions & 38 deletions src/contexts/dbContext.tsx
@@ -20,6 +20,7 @@ import {
UTIL_CHANNEL,
} from "../constants/IpcConnection";
import { DEFAULT_ROUTE_ON_IMPORT, ROUTES } from "../constants/routes";
import { NO_RESOURCES_ERROR } from "../constants/errors";
import { AddEntry, GetHistory, RemoveEntry } from "../services/historyStore";
import { WorkbenchDB } from "../services/workbenchDB";
import { isSqliteSchemaOutdated } from "../utils/checks";
@@ -183,12 +184,8 @@ export const WorkbenchDBProvider = (
// Check that the database has the correct header information.
if (!infoHeader) {
const errTitle = "Invalid SQLite file";
const errMessage =
"Invalid SQLite file: " +
sqliteFilePath +
"\n" +
"The SQLite file is invalid. Try re-importing the ScanCode JSON " +
"file and creating a new SQLite file.";
const errMessage = `Invalid SQLite file: ${sqliteFilePath}
The SQLite file is invalid. Try re-importing the ScanCode JSON file and creating a new SQLite file.`;

console.error("Handled invalid sqlite import", {
title: errTitle,
@@ -239,8 +236,7 @@ export const WorkbenchDBProvider = (
.then((db) => db.File.findOne({ where: { parent: "#" } }))
.then(async (root) => {
if (!root) {
console.error("Root path not found !!!!", root);
return;
throw new Error("Root path not found !!");
}

const defaultPath = root.getDataValue("path");
@@ -271,35 +267,20 @@
);

if (!preventNavigation) changeRouteOnImport();
})
.catch((err) => {
const foundInvalidHistoryItem = GetHistory().find(
(historyItem) => historyItem.sqlite_path === sqliteFilePath
);
if (foundInvalidHistoryItem) {
RemoveEntry(foundInvalidHistoryItem);
}
console.error("Err trying to import sqlite:");
console.error(err);
toast.error(
`Unexpected error while importing json \nPlease check console for more info`
);
abortImport();
});
})
.catch((err) => {
.catch((err: Error) => {
abortImport();
const foundInvalidHistoryItem = GetHistory().find(
(historyItem) => historyItem.sqlite_path === sqliteFilePath
);
if (foundInvalidHistoryItem) {
RemoveEntry(foundInvalidHistoryItem);
}
console.error("Err trying to import sqlite:");
console.error(err);
console.error("Err trying to import sqlite:", err);
toast.error(
`Unexpected error while finalising json import \nPlease check console for more info`
`Sqlite file is outdated or corrupt\nPlease try importing json file again`
);
abortImport();
});
}

@@ -341,11 +322,10 @@ export const WorkbenchDBProvider = (
.then((db) => db.File.findOne({ where: { parent: "#" } }))
.then(async (root) => {
if (!root) {
console.error("Root path not found !!!!");
console.error("Root:", root);
abortImport();
return;
throw new Error("Root path not found !!!!");
}

const defaultPath = root.getDataValue("path");

await updateWorkbenchDB(newWorkbenchDB, sqliteFilePath);
@@ -369,17 +349,34 @@
);

if (!preventNavigation) changeRouteOnImport();
})
.catch((err) => {
abortImport();
const foundInvalidHistoryItem = GetHistory().find(
(historyItem) => historyItem.sqlite_path === sqliteFilePath
);
if (foundInvalidHistoryItem) {
RemoveEntry(foundInvalidHistoryItem);
}
console.error(err);
toast.error(
`Can't resolve root directory \nPlease check console for more info`
);
});
})
.catch((err) => {
.catch((err: Error) => {
abortImport();
console.error(
"Some error parsing data (caught in workbenchContext) !!",
err
);
toast.error(
"Some error parsing data !! \nPlease check console for more info"
);
if (err.message === NO_RESOURCES_ERROR) {
toast.error("No resources found in scan\nAborting import");
} else {
console.error(
"Some error parsing json data (caught in dbContext)",
err
);
toast.error(
"Some error parsing json data !! \nPlease check console for more info"
);
}
});
}

8 changes: 7 additions & 1 deletion src/pages/Licenses/Licenses.tsx
@@ -171,7 +171,13 @@ const LicenseDetections = () => {
activateLicenseClue(newLicenseClues[0]);
}
}
})().then(endProcessing);
})()
.then(endProcessing)
.catch((err) => {
endProcessing();
console.error("Error getting license contents", err);
toast.error("Error loading license data.");
});
}, []);

const handleItemToggle = (
6 changes: 6 additions & 0 deletions src/pages/TableView/TableView.tsx
@@ -1,4 +1,5 @@
import { Op } from "sequelize";
import { toast } from "react-toastify";
import React, { useEffect, useState } from "react";
import { ColDef, ColumnApi, GridApi, GridReadyEvent } from "ag-grid-community";

@@ -101,6 +102,11 @@ const TableView = () => {
if (prevColDefs.length > 0) return prevColDefs; // Don't mutate cols, if already set
return [...COLUMN_GROUPS.DEFAULT];
});
})
.catch((err) => {
endProcessing();
console.error("Error getting tableview contents", err);
toast.error("Error loading table data.");
});

// Update set filters whenever new db is loaded or path is changed
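Both page-level fixes above follow the same pattern for async loading inside `useEffect`: the async IIFE's rejection is caught so the processing indicator is always cleared and the user sees a toast instead of a silent, console-only failure. The sketch below restates that pattern with hypothetical names (`ExampleView`, `fetchRows`) that are not from the PR.

```tsx
import { useEffect } from "react";
import { toast } from "react-toastify";

// Hypothetical component illustrating the error-handling pattern used in
// Licenses.tsx and TableView.tsx above.
function ExampleView({
  fetchRows,
  endProcessing,
}: {
  fetchRows: () => Promise<void>;
  endProcessing: () => void;
}) {
  useEffect(() => {
    (async () => {
      await fetchRows();
    })()
      .then(endProcessing)
      .catch((err) => {
        endProcessing(); // clear the spinner even when loading fails
        console.error("Error loading view contents", err);
        toast.error("Error loading data.");
      });
  }, []);

  return null;
}
```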
30 changes: 24 additions & 6 deletions src/services/models/header.ts
@@ -53,15 +53,33 @@ export default function headerModel(sequelize: Sequelize) {
primaryKey: true,
type: DataTypes.INTEGER,
},
json_file_name: DataTypes.STRING,
tool_name: DataTypes.STRING,
tool_version: DataTypes.STRING,
notice: DataTypes.STRING,
duration: DataTypes.DOUBLE,
json_file_name: {
type: DataTypes.STRING,
defaultValue: null,
},
tool_name: {
type: DataTypes.STRING,
defaultValue: null,
},
tool_version: {
type: DataTypes.STRING,
defaultValue: null,
},
notice: {
type: DataTypes.STRING,
defaultValue: null,
},
duration: {
type: DataTypes.DOUBLE,
defaultValue: null,
},
options: jsonDataType("options", {}),
input: jsonDataType("input", []),
header_content: DataTypes.STRING,
files_count: DataTypes.INTEGER,
files_count: {
type: DataTypes.INTEGER,
defaultValue: 0,
},
output_format_version: {
type: DataTypes.STRING,
defaultValue: null,
89 changes: 62 additions & 27 deletions src/services/workbenchDB.ts
@@ -55,6 +55,7 @@ import { TodoAttributes } from "./models/todo";
import { HeaderAttributes } from "./models/header";
import { LicenseReferenceAttributes } from "./models/licenseReference";
import { LicenseRuleReferenceAttributes } from "./models/licenseRuleReference";
import { NO_RESOURCES_ERROR } from "../constants/errors";

const { version: workbenchVersion } = packageJson;

@@ -282,12 +283,12 @@ export class WorkbenchDB {
fileList.forEach((file) => {
const fileParentPath = file.getDataValue("parent");
const fileNode = pathToNodeMap.get(file.getDataValue("path"));
if (Number(file.getDataValue("id")) !== 0) {
if (file.getDataValue("parent") === "#") {
roots.push(fileNode);
} else {
if (pathToNodeMap.has(fileParentPath)) {
pathToNodeMap.get(fileParentPath).children?.push(fileNode);
}
} else {
roots.push(fileNode);
}
});

@@ -309,11 +310,10 @@
const stream = fs.createReadStream(jsonFilePath, { encoding: "utf8" });
let files_count = 0;
let dirs_count = 0;
let index = 0;
let rootPath: string | null = null;
let hasRootPath = false;
const batchSize = 1000;
let files: Resource[] = [];
const parsedFilePaths = new Set<string>();

let progress = 0;
let promiseChain: Promise<unknown> = this.sync;

@@ -379,38 +379,40 @@
.on("data", function (file?: Resource) {
if (!file) return;

if (!rootPath) {
rootPath = file.path.split("/")[0];
}
if (rootPath === file.path) {
hasRootPath = true;
}
// @TODO: When/if scancode reports directories in its header, this needs
// to be replaced.
if (index === 0) {
if (parsedFilePaths.size === 0) {
dirs_count = file.dirs_count;
}
file.id = index++;
file.id = parsedFilePaths.size;

primaryPromise._parseLicenseDetections(file, TopLevelData);
primaryPromise._parseLicenseClues(file, TopLevelData);

files.push(file);
parsedFilePaths.add(file.path);

if (files.length >= batchSize) {
// Need to set a new variable before handing to promise
this.pause();

promiseChain = promiseChain
.then(() =>
primaryPromise._imputeIntermediateDirectories(
files,
parsedFilePaths
)
)
.then(() => primaryPromise._batchCreateFiles(files))
.then(() => {
const currentProgress = Math.round(
(index / (files_count + dirs_count)) * 100
(parsedFilePaths.size / (files_count + dirs_count)) * 100
);
if (currentProgress > progress) {
progress = currentProgress;
console.info(
`Batch-${++batchCount} completed, \n`,
`JSON Import progress @ ${progress} % -- ${index}/${files_count}+${dirs_count}`
`JSON Import progress @ ${progress} % -- ${parsedFilePaths.size}/${files_count}+${dirs_count}`
);
onProgressUpdate(progress);
}
@@ -426,22 +428,20 @@
// Add root directory into data
// See https://github.com/nexB/scancode-toolkit/issues/543
promiseChain
.then(() =>
this._imputeIntermediateDirectories(files, parsedFilePaths)
)
.then(() => {
if (rootPath && !hasRootPath) {
files.push({
path: rootPath,
name: rootPath,
type: "directory",
files_count: files_count,
});
if (files.length === 0) {
throw new Error(NO_RESOURCES_ERROR);
}
})
.then(() => this._batchCreateFiles(files))
.then(() => this.db.Header.create(TopLevelData.parsedHeader))
.then(() => {
console.info(
`Batch-${++batchCount} completed, \n`,
`JSON Import progress @ ${progress} % -- ${index}/${files_count}+${dirs_count}`
`JSON Import progress @ ${progress} % -- ${parsedFilePaths.size}/${files_count}+${dirs_count}`
);
onProgressUpdate(90);
})
@@ -456,10 +456,12 @@
.then(() => {
onProgressUpdate(100);
console.info("JSON parse completed (final step)");
console.timeEnd("json-parse-time");
resolve();
})
.catch((e: unknown) => reject(e));
.catch((e: unknown) => reject(e))
.finally(() => {
console.timeEnd("json-parse-time");
});
})
.on("error", (err: unknown) => {
console.error(
@@ -600,7 +602,7 @@ export class WorkbenchDB {
duration: header.duration,
options: header?.options || {},
input,
files_count: header.extra_data?.files_count,
files_count: header.extra_data?.files_count || 0,
output_format_version: header.output_format_version,
spdx_license_list_version: header.extra_data?.spdx_license_list_version,
operating_system: header.extra_data?.system_environment?.operating_system,
@@ -880,6 +882,39 @@
});
}

// Adds & modifies files array in place, adding missing intermediate directories
_imputeIntermediateDirectories(
files: Resource[],
parsedFilePaths: Set<string>
) {
const intermediateDirectories: Resource[] = [];

files.forEach((file) => {
file.parent = parentPath(file.path);

// Add intermediate directories if parent not available in files
if (!parsedFilePaths.has(file.parent)) {
for (
let currentDir = file.parent;
currentDir !== parentPath(currentDir) &&
!parsedFilePaths.has(currentDir);
currentDir = parentPath(currentDir)
) {
intermediateDirectories.push({
id: parsedFilePaths.size,
path: currentDir,
parent: parentPath(currentDir),
name: path.basename(currentDir),
type: "directory",
files_count: 0,
});
parsedFilePaths.add(currentDir);
}
}
});
files.push(...intermediateDirectories);
}

_batchCreateFiles(files: Resource[]) {
// Add batched files to the DB
return this._addFlattenedFiles(files).then(() => this._addFiles(files));
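For context on how the new helper is invoked during the streamed import, here is a simplified, hypothetical wrapper around the two calls made per batch in `parseJSON`; the `Resource` stand-in type, `BatchWriter` interface, and `flushBatch` name are not from the PR. The important detail, visible in the diff, is that the shared `parsedFilePaths` set persists across batches, so a directory imputed for an earlier batch is not re-created later.

```ts
// Minimal stand-in for the real model attributes type used by WorkbenchDB.
type Resource = { id?: number; path: string; parent?: string; type?: string };

interface BatchWriter {
  _imputeIntermediateDirectories(files: Resource[], seen: Set<string>): void;
  _batchCreateFiles(files: Resource[]): Promise<unknown>;
}

async function flushBatch(
  db: BatchWriter,
  files: Resource[],
  parsedFilePaths: Set<string> // shared across all batches of one import
): Promise<void> {
  // Adds missing intermediate directories to `files` in place and records
  // them in parsedFilePaths, so later batches will not re-create them.
  db._imputeIntermediateDirectories(files, parsedFilePaths);
  // Then persist the batch (files plus any imputed directories) to SQLite.
  await db._batchCreateFiles(files);
}
```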