
Commit

Merge remote-tracking branch 'migration/main' into nodejs-document-ai-migration
ace-n committed Nov 11, 2022
2 parents 47d6fd1 + d58c964 commit 20ff589
Showing 24 changed files with 1,412 additions and 0 deletions.
4 changes: 4 additions & 0 deletions document-ai/.eslintrc.yml
@@ -0,0 +1,4 @@
---
rules:
  no-console: off
  node/no-unsupported-features/node-builtins: off
149 changes: 149 additions & 0 deletions document-ai/batch-process-document.js
@@ -0,0 +1,149 @@
/**
* Copyright 2020 Google LLC
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict';

const uuid = require('uuid');

async function main(
projectId = 'YOUR_PROJECT_ID',
location = 'YOUR_PROJECT_LOCATION',
processorId = 'YOUR_PROCESSOR_ID', // Create this in the Cloud Console
gcsInputUri = 'gs://cloud-samples-data/documentai/invoice.pdf',
gcsOutputUri = 'output-bucket',
gcsOutputUriPrefix = uuid.v4()
) {
// [START documentai_batch_process_document]
/**
* TODO(developer): Uncomment these variables before running the sample.
*/
// const projectId = 'YOUR_PROJECT_ID';
// const location = 'YOUR_PROJECT_LOCATION'; // Format is 'us' or 'eu'
// const processorId = 'YOUR_PROCESSOR_ID';
// const gcsInputUri = 'YOUR_SOURCE_PDF';
// const gcsOutputUri = 'YOUR_STORAGE_BUCKET';
// const gcsOutputUriPrefix = 'YOUR_STORAGE_PREFIX';

// Imports the Google Cloud client library
const {DocumentProcessorServiceClient} =
require('@google-cloud/documentai').v1;
const {Storage} = require('@google-cloud/storage');

  // Instantiates the Document AI and Storage clients
const client = new DocumentProcessorServiceClient();
const storage = new Storage();

const {default: PQueue} = require('p-queue');

async function batchProcessDocument() {
const name = `projects/${projectId}/locations/${location}/processors/${processorId}`;

// Configure the batch process request.
const request = {
name,
inputDocuments: {
gcsDocuments: {
documents: [
{
gcsUri: gcsInputUri,
mimeType: 'application/pdf',
},
],
},
},
documentOutputConfig: {
gcsOutputConfig: {
          // gcsOutputUri is assumed to be a bare bucket name (it is also passed to
          // storage.bucket() below), so prepend the gs:// scheme for the API call.
          gcsUri: `gs://${gcsOutputUri}/${gcsOutputUriPrefix}/`,
},
},
};

    // Batch process the document using a long-running operation.
    // You can wait for the operation to finish now, or retrieve the results later.
    // Note: the first request to the service takes longer than subsequent
    // requests.
const [operation] = await client.batchProcessDocuments(request);

// Wait for operation to complete.
await operation.promise();
console.log('Document processing complete.');

// Query Storage bucket for the results file(s).
const query = {
prefix: gcsOutputUriPrefix,
};

console.log('Fetching results ...');

// List all of the files in the Storage bucket
const [files] = await storage.bucket(gcsOutputUri).getFiles(query);

// Add all asynchronous downloads to queue for execution.
const queue = new PQueue({concurrency: 15});
const tasks = files.map((fileInfo, index) => async () => {
// Get the file as a buffer
const [file] = await fileInfo.download();

console.log(`Fetched file #${index + 1}:`);

// The results stored in the output Storage location
// are formatted as a document object.
const document = JSON.parse(file.toString());
const {text} = document;

// Extract shards from the text field
const getText = textAnchor => {
if (!textAnchor.textSegments || textAnchor.textSegments.length === 0) {
return '';
}

// First shard in document doesn't have startIndex property
const startIndex = textAnchor.textSegments[0].startIndex || 0;
const endIndex = textAnchor.textSegments[0].endIndex;

return text.substring(startIndex, endIndex);
};

// Read the text recognition output from the processor
console.log('The document contains the following paragraphs:');

const [page1] = document.pages;
const {paragraphs} = page1;
for (const paragraph of paragraphs) {
const paragraphText = getText(paragraph.layout.textAnchor);
console.log(`Paragraph text:\n${paragraphText}`);
}

// Form parsing provides additional output about
// form-formatted PDFs. You must create a form
// processor in the Cloud Console to see full field details.
console.log('\nThe following form key/value pairs were detected:');

const {formFields} = page1;
for (const field of formFields) {
const fieldName = getText(field.fieldName.textAnchor);
const fieldValue = getText(field.fieldValue.textAnchor);

console.log('Extracted key value pair:');
console.log(`\t(${fieldName}, ${fieldValue})`);
}
});
await queue.addAll(tasks);
}
// [END documentai_batch_process_document]

batchProcessDocument();
}
main(...process.argv.slice(2));
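
For reference, this sample reads its arguments positionally from process.argv in the order shown in main(), so a local invocation would look roughly like the following (the project, processor, and output bucket values are placeholders; the input PDF is the public sample used in the defaults above):

node batch-process-document.js your-project-id us your-processor-id gs://cloud-samples-data/documentai/invoice.pdf your-output-bucket your-output-prefix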
25 changes: 25 additions & 0 deletions document-ai/package.json
@@ -0,0 +1,25 @@
{
"name": "nodejs-document-ai-samples",
"private": true,
"license": "Apache-2.0",
"author": "Google LLC",
"engines": {
"node": ">=12.0.0"
},
"files": [
"*.js"
],
"scripts": {
"test": "mocha test/*.js --timeout 600000"
},
"dependencies": {
"@google-cloud/documentai": "^6.1.0",
"@google-cloud/storage": "^6.0.0",
"p-queue": "^6.6.2",
"uuid": "^9.0.0"
},
"devDependencies": {
"chai": "^4.2.0",
"mocha": "^8.0.0"
}
}
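
To try these samples locally, installing the dependencies and running the Mocha suite should be enough, assuming the test/ directory referenced by the test script is present alongside these files:

npm install
npm test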
132 changes: 132 additions & 0 deletions document-ai/process-document-form.js
@@ -0,0 +1,132 @@
/**
 * Copyright 2021 Google LLC
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

'use strict';

async function main(projectId, location, processorId, filePath) {
// [START documentai_process_form_document]
/**
* TODO(developer): Uncomment these variables before running the sample.
*/
// const projectId = 'YOUR_PROJECT_ID';
// const location = 'YOUR_PROJECT_LOCATION'; // Format is 'us' or 'eu'
// const processorId = 'YOUR_PROCESSOR_ID'; // Create processor in Cloud Console
// const filePath = '/path/to/local/pdf';

const {DocumentProcessorServiceClient} =
require('@google-cloud/documentai').v1beta3;

// Instantiates a client
const client = new DocumentProcessorServiceClient();

async function processDocument() {
// The full resource name of the processor, e.g.:
    // projects/project-id/locations/location/processors/processor-id
// You must create new processors in the Cloud Console first
const name = `projects/${projectId}/locations/${location}/processors/${processorId}`;

// Read the file into memory.
const fs = require('fs').promises;
const imageFile = await fs.readFile(filePath);

// Convert the image data to a Buffer and base64 encode it.
const encodedImage = Buffer.from(imageFile).toString('base64');

const request = {
name,
rawDocument: {
content: encodedImage,
mimeType: 'application/pdf',
},
};

// Recognizes text entities in the PDF document
const [result] = await client.processDocument(request);

console.log('Document processing complete.');

// Read the table and form fields output from the processor
// The form processor also contains OCR data. For more information
// on how to parse OCR data please see the OCR sample.
// For a full list of Document object attributes,
// please reference this page: https://googleapis.dev/nodejs/documentai/latest/index.html
const {document} = result;
const {text} = document;
console.log(`Full document text: ${JSON.stringify(text)}`);
console.log(`There are ${document.pages.length} page(s) in this document.`);

for (const page of document.pages) {
console.log(`\n\n**** Page ${page.pageNumber} ****`);

console.log(`Found ${page.tables.length} table(s):`);
for (const table of page.tables) {
        const numColumns = table.headerRows[0].cells.length;
        const numRows = table.bodyRows.length;
        console.log(`Table with ${numColumns} columns and ${numRows} rows:`);
printTableInfo(table, text);
}
console.log(`Found ${page.formFields.length} form field(s):`);
for (const field of page.formFields) {
const fieldName = getText(field.fieldName.textAnchor, text);
const fieldValue = getText(field.fieldValue.textAnchor, text);
console.log(
`\t* ${JSON.stringify(fieldName)}: ${JSON.stringify(fieldValue)}`
);
}
}
}

const printTableInfo = (table, text) => {
// Print header row
let headerRowText = '';
for (const headerCell of table.headerRows[0].cells) {
const headerCellText = getText(headerCell.layout.textAnchor, text);
headerRowText += `${JSON.stringify(headerCellText.trim())} | `;
}
    console.log(
      `Columns: ${headerRowText.substring(0, headerRowText.length - 3)}`
    );
// Print first body row
let bodyRowText = '';
for (const bodyCell of table.bodyRows[0].cells) {
const bodyCellText = getText(bodyCell.layout.textAnchor, text);
bodyRowText += `${JSON.stringify(bodyCellText.trim())} | `;
}
console.log(
`First row data: ${bodyRowText.substring(0, bodyRowText.length - 3)}`
);
};

// Extract shards from the text field
const getText = (textAnchor, text) => {
if (!textAnchor.textSegments || textAnchor.textSegments.length === 0) {
return '';
}

// First shard in document doesn't have startIndex property
const startIndex = textAnchor.textSegments[0].startIndex || 0;
const endIndex = textAnchor.textSegments[0].endIndex;

return text.substring(startIndex, endIndex);
};

// [END documentai_process_form_document]
await processDocument();
}

main(...process.argv.slice(2)).catch(err => {
console.error(err);
process.exitCode = 1;
});
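
As with the batch sample, arguments are read positionally, so a run against a local PDF might look like this (placeholder values; the processor is assumed to be a form parser created in the Cloud Console):

node process-document-form.js your-project-id us your-processor-id /path/to/local/file.pdf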