[File upload] Migrate routing to NP & add route validation (#52313) (#57063)

* Cursory validation

* Handle empty data arrays and settings conditionally

* Set validation defaults. Move logic to routes folder and separate for testing

* Move plugin init back into routes folder. Syntax updates

* Migrate router to NP

* Use new np router and fetch. Add placeholder schema validation

* Override default 'maxBytes'

* Body with first-level schema keys in place

* Add conditional validation of mappings, data and settings. Clean up old joi code

* Ensure query is handled correctly on both sides. Iron out decision logic on server-side

* Move conditional validation to first step in payload handling

* Update http_service to work with latest NP changes on master

* Some reorg. Only update telemetry if no errors

* Clean up

* Test file upload validation logic

* Linting

* Review feedback. Remove unneeded apiBasePath var

* Pass entire req object through to ES, not just the validated fields
Aaron Caldwell committed Feb 7, 2020
1 parent a21218d commit 0a508b0
Showing 7 changed files with 212 additions and 66 deletions.
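For orientation, here is a hypothetical two-step upload against the migrated endpoint, written against the client-side `http` helper (imported as `httpService` by the indexing service in the diffs below). The index name, mappings, and document values are illustrative, assume an async context, and are not part of the commit.

// First chunk: no id yet, so the route creates the index and responds with a generated id.
const firstChunk = await httpService({
  url: '/api/fileupload/import',
  method: 'POST',
  data: {
    index: 'my_index',
    fileType: 'json',
    data: [], // may be empty on the initial call
    settings: { number_of_shards: 1 },
    mappings: { coordinates: { type: 'geo_point' } },
    ingestPipeline: {},
  },
});

// Follow-up chunks pass the returned id as a query parameter and must carry at least one document.
await httpService({
  url: '/api/fileupload/import',
  method: 'POST',
  query: { id: firstChunk.id },
  data: { index: 'my_index', fileType: 'json', data: [{ name: 'doc1' }], ingestPipeline: {} },
});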
4 changes: 2 additions & 2 deletions x-pack/legacy/plugins/file_upload/public/kibana_services.js
@@ -11,12 +11,12 @@ export const indexPatternService = npStart.plugins.data.indexPatterns;

export let savedObjectsClient;
export let basePath;
export let apiBasePath;
export let kbnVersion;
export let kbnFetch;

export const initServicesAndConstants = ({ savedObjects, http, injectedMetadata }) => {
savedObjectsClient = savedObjects.client;
basePath = http.basePath.basePath;
apiBasePath = http.basePath.prepend('/api');
kbnVersion = injectedMetadata.getKibanaVersion(DEFAULT_KBN_VERSION);
kbnFetch = http.fetch;
};
14 changes: 5 additions & 9 deletions x-pack/legacy/plugins/file_upload/public/util/http_service.js
@@ -4,11 +4,8 @@
* you may not use this file except in compliance with the Elastic License.
*/

// service for interacting with the server

import { addSystemApiHeader } from 'ui/system_api';
import { i18n } from '@kbn/i18n';
import { kbnVersion } from '../kibana_services';
import { kbnFetch } from '../kibana_services';

export async function http(options) {
if (!(options && options.url)) {
@@ -17,11 +14,10 @@ export async function http(options) {
});
}
const url = options.url || '';
const headers = addSystemApiHeader({
const headers = {
'Content-Type': 'application/json',
'kbn-version': kbnVersion,
...options.headers,
});
};

const allHeaders = options.headers === undefined ? headers : { ...options.headers, ...headers };
const body = options.data === undefined ? null : JSON.stringify(options.data);
@@ -30,6 +26,7 @@
method: options.method || 'GET',
headers: allHeaders,
credentials: 'same-origin',
query: options.query,
};

if (body !== null) {
@@ -40,8 +37,7 @@

async function doFetch(url, payload) {
try {
const resp = await fetch(url, payload);
return resp.json();
return await kbnFetch(url, payload);
} catch (err) {
return {
failures: [
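As a usage note on the switch to `kbnFetch` above: callers now pass paths relative to Kibana's base path, since core's fetch prepends it, which is why the `apiBasePath` constant could be dropped. A minimal sketch, reusing a URL this plugin already calls and assuming an async context:

const indices = await http({
  url: '/api/index_management/indices', // relative path; no apiBasePath prefix needed
  method: 'GET',
});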
@@ -5,7 +5,7 @@
*/

import { http as httpService } from './http_service';
import { indexPatternService, apiBasePath, savedObjectsClient } from '../kibana_services';
import { indexPatternService, savedObjectsClient } from '../kibana_services';
import { getGeoJsonIndexingDetails } from './geo_processing';
import { sizeLimitedChunking } from './size_limited_chunking';
import { i18n } from '@kbn/i18n';
@@ -37,11 +37,9 @@ export async function indexData(parsedFile, transformDetails, indexName, dataTyp
data: [],
index: indexName,
});
let id;
const id = createdIndex && createdIndex.id;
try {
if (createdIndex && createdIndex.id) {
id = createdIndex.id;
} else {
if (!id) {
throw i18n.translate('xpack.fileUpload.indexingService.errorCreatingIndex', {
defaultMessage: 'Error creating index',
});
@@ -117,12 +115,13 @@ function transformDataByFormatForIndexing(transform, parsedFile, dataType) {
}

async function writeToIndex(indexingDetails) {
const paramString = indexingDetails.id !== undefined ? `?id=${indexingDetails.id}` : '';
const query = indexingDetails.id ? { id: indexingDetails.id } : null;
const { appName, index, data, settings, mappings, ingestPipeline } = indexingDetails;

return await httpService({
url: `${apiBasePath}/fileupload/import${paramString}`,
url: `/api/fileupload/import`,
method: 'POST',
...(query ? { query } : {}),
data: {
index,
data,
@@ -227,7 +226,7 @@ async function getIndexPatternId(name) {

export const getExistingIndexNames = async () => {
const indexes = await httpService({
url: `${apiBasePath}/index_management/indices`,
url: `/api/index_management/indices`,
method: 'GET',
});
return indexes ? indexes.map(({ name }) => name) : [];
@@ -16,7 +16,7 @@ export function importDataProvider(callWithRequest) {
try {
const { id: pipelineId, pipeline } = ingestPipeline;

if (id === undefined) {
if (!id) {
// first chunk of data, create the index and id to return
id = uuid.v1();

14 changes: 3 additions & 11 deletions x-pack/legacy/plugins/file_upload/server/plugin.js
@@ -4,24 +4,16 @@
* you may not use this file except in compliance with the Elastic License.
*/

import { getImportRouteHandler } from './routes/file_upload';
import { MAX_BYTES } from '../common/constants/file_import';
import { initRoutes } from './routes/file_upload';
import { registerFileUploadUsageCollector } from './telemetry';

export class FileUploadPlugin {
setup(core, plugins, __LEGACY) {
const elasticsearchPlugin = __LEGACY.plugins.elasticsearch;
const getSavedObjectsRepository = __LEGACY.savedObjects.getSavedObjectsRepository;
const router = core.http.createRouter();

// Set up route
__LEGACY.route({
method: 'POST',
path: '/api/fileupload/import',
handler: getImportRouteHandler(elasticsearchPlugin, getSavedObjectsRepository),
config: {
payload: { maxBytes: MAX_BYTES },
},
});
initRoutes(router, elasticsearchPlugin, getSavedObjectsRepository);

registerFileUploadUsageCollector(plugins.usageCollection, {
elasticsearchPlugin,
153 changes: 118 additions & 35 deletions x-pack/legacy/plugins/file_upload/server/routes/file_upload.js
@@ -5,43 +5,126 @@
*/

import { callWithRequestFactory } from '../client/call_with_request_factory';
import { wrapError } from '../client/errors';
import { importDataProvider } from '../models/import_data';
import { updateTelemetry } from '../telemetry/telemetry';
import { MAX_BYTES } from '../../common/constants/file_import';
import { schema } from '@kbn/config-schema';

function importData({ callWithRequest, id, index, settings, mappings, ingestPipeline, data }) {
const { importData: importDataFunc } = importDataProvider(callWithRequest);
return importDataFunc(id, index, settings, mappings, ingestPipeline, data);
}

export function getImportRouteHandler(elasticsearchPlugin, getSavedObjectsRepository) {
return async request => {
const requestObj = {
query: request.query,
payload: request.payload,
params: request.payload,
auth: request.auth,
headers: request.headers,
};

// `id` being `undefined` tells us that this is a new import that needs to create a new index.
// Follow-up import calls that just add additional data will include the `id` of the created
// index; we ignore those and don't increment the counter.
const { id } = requestObj.query;
if (id === undefined) {
await updateTelemetry({ elasticsearchPlugin, getSavedObjectsRepository });
}
export const IMPORT_ROUTE = '/api/fileupload/import';

export const querySchema = schema.maybe(
schema.object({
id: schema.nullable(schema.string()),
})
);

export const bodySchema = schema.object(
{
app: schema.maybe(schema.string()),
index: schema.string(),
fileType: schema.string(),
ingestPipeline: schema.maybe(
schema.object(
{},
{
defaultValue: {},
allowUnknowns: true,
}
)
),
},
{ allowUnknowns: true }
);

const options = {
body: {
maxBytes: MAX_BYTES,
accepts: ['application/json'],
},
};

export const idConditionalValidation = (body, boolHasId) =>
schema
.object(
{
data: boolHasId
? schema.arrayOf(schema.object({}, { allowUnknowns: true }), { minSize: 1 })
: schema.any(),
settings: boolHasId
? schema.any()
: schema.object(
{},
{
defaultValue: {
number_of_shards: 1,
},
allowUnknowns: true,
}
),
mappings: boolHasId
? schema.any()
: schema.object(
{},
{
defaultValue: {},
allowUnknowns: true,
}
),
},
{ allowUnknowns: true }
)
.validate(body);

const requestContentWithDefaults = {
id,
callWithRequest: callWithRequestFactory(elasticsearchPlugin, requestObj),
index: undefined,
settings: {},
mappings: {},
ingestPipeline: {},
data: [],
...requestObj.payload,
};
return importData(requestContentWithDefaults).catch(wrapError);
const finishValidationAndProcessReq = (elasticsearchPlugin, getSavedObjectsRepository) => {
return async (con, req, { ok, badRequest }) => {
const {
query: { id },
body,
} = req;
const boolHasId = !!id;

let resp;
try {
const validIdReqData = idConditionalValidation(body, boolHasId);
const callWithRequest = callWithRequestFactory(elasticsearchPlugin, req);
const { importData: importDataFunc } = importDataProvider(callWithRequest);

const { index, settings, mappings, ingestPipeline, data } = validIdReqData;
const processedReq = await importDataFunc(
id,
index,
settings,
mappings,
ingestPipeline,
data
);

if (processedReq.success) {
resp = ok({ body: processedReq });
// If no id has been established then this is a new index; update telemetry
if (!boolHasId) {
await updateTelemetry({ elasticsearchPlugin, getSavedObjectsRepository });
}
} else {
resp = badRequest(`Error processing request 1: ${processedReq.error.message}`, ['body']);
}
} catch (e) {
resp = badRequest(`Error processing request 2: ${e.message}`, ['body']);
}
return resp;
};
}
};

export const initRoutes = (router, esPlugin, getSavedObjectsRepository) => {
router.post(
{
path: `${IMPORT_ROUTE}{id?}`,
validate: {
query: querySchema,
body: bodySchema,
},
options,
},
finishValidationAndProcessReq(esPlugin, getSavedObjectsRepository)
);
};
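A supplementary sketch (not part of the commit) of how `idConditionalValidation` behaves when no id has been provided and the optional objects are omitted; the input values are illustrative, and it assumes `@kbn/config-schema` applies the declared `defaultValue`s to absent keys:

// No id yet: `data` may be empty, and absent `settings`/`mappings` fall back to their defaults.
const validated = idConditionalValidation({ index: 'my_index', fileType: 'json', data: [] }, false);
// validated.settings -> { number_of_shards: 1 }
// validated.mappings -> {}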
@@ -0,0 +1,76 @@
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/

import { querySchema, bodySchema, idConditionalValidation } from './file_upload';

const queryWithId = {
id: '123',
};

const bodyWithoutQueryId = {
index: 'islandofone',
data: [],
settings: { number_of_shards: 1 },
mappings: { coordinates: { type: 'geo_point' } },
ingestPipeline: {},
fileType: 'json',
app: 'Maps',
};

const bodyWithQueryId = {
index: 'islandofone2',
data: [{ coordinates: [], name: 'islandofone2' }],
settings: {},
mappings: {},
ingestPipeline: {},
fileType: 'json',
};

describe('route validation', () => {
it(`validates query with id`, async () => {
const validationResult = querySchema.validate(queryWithId);
expect(validationResult.id).toBe(queryWithId.id);
});

it(`validates query without id`, async () => {
const validationResult = querySchema.validate({});
expect(validationResult.id).toBeNull();
});

it(`throws when query contains content other than an id`, async () => {
expect(() => querySchema.validate({ notAnId: 123 })).toThrowError(
`[notAnId]: definition for this key is missing`
);
});

it(`validates body with valid fields`, async () => {
const validationResult = bodySchema.validate(bodyWithoutQueryId);
expect(validationResult).toEqual(bodyWithoutQueryId);
});

it(`throws if an expected field is missing`, async () => {
/* eslint-disable no-unused-vars */
const { index, ...bodyWithoutIndexField } = bodyWithoutQueryId;
expect(() => bodySchema.validate(bodyWithoutIndexField)).toThrowError(
`[index]: expected value of type [string] but got [undefined]`
);
});

it(`validates conditional fields when id has been provided in query`, async () => {
const validationResult = idConditionalValidation(bodyWithQueryId, true);
expect(validationResult).toEqual(bodyWithQueryId);
});

it(`validates conditional fields when no id has been provided in query`, async () => {
const validationResultWhenIdPresent = idConditionalValidation(bodyWithoutQueryId, false);
expect(validationResultWhenIdPresent).toEqual(bodyWithoutQueryId);
// When an id has been provided, `data` must contain at least one document,
// so expect the same body (with an empty data array) to throw in that case
expect(() => idConditionalValidation(bodyWithoutQueryId, true)).toThrowError(
`[data]: array size is [0], but cannot be smaller than [1]`
);
});
});
