Merge branch 'main' into close-flyout
BionIT authored Jun 6, 2024
2 parents 53be329 + cb84bfb commit 48ddefa
Showing 18 changed files with 263 additions and 140 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/build_and_test_workflow.yml
@@ -10,11 +10,15 @@ on:
paths-ignore:
- '**/*.md'
- 'docs/**'
- '.lycheeignore'
- 'changelogs/fragments/**'
pull_request:
branches: ['**']
paths-ignore:
- '**/*.md'
- 'docs/**'
- '.lycheeignore'
- 'changelogs/fragments/**'

env:
TEST_BROWSER_HEADLESS: 1
2 changes: 2 additions & 0 deletions .github/workflows/cypress_workflow.yml
@@ -6,6 +6,8 @@ on:
branches: [ '**' ]
paths-ignore:
- '**/*.md'
- '.lycheeignore'
- 'changelogs/fragments/**'
workflow_dispatch:
inputs:
test_repo:
2 changes: 2 additions & 0 deletions changelogs/fragments/6899.yml
@@ -0,0 +1,2 @@
feat:
- Remove endpoint validation for create data source saved object API ([#6899](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6899))
2 changes: 2 additions & 0 deletions changelogs/fragments/6928.yml
@@ -0,0 +1,2 @@
feat:
- [MD]Use placeholder for data source credentials fields when export saved object ([#6928](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6928))
3 changes: 2 additions & 1 deletion config/opensearch_dashboards.yml
@@ -320,6 +320,7 @@
# savedObjects.permission.enabled: true

# Set the value to true to enable workspace feature
# Please note, workspace will not work with multi-tenancy. To enable workspace feature, you need to disable multi-tenancy first with `opensearch_security.multitenancy.enabled: false`
# workspace.enabled: false

# Optional settings to specify saved object types to be deleted during migration.
@@ -338,4 +339,4 @@
# Set the backend roles in groups or users, whoever has the backend roles or exactly match the user ids defined in this config will be regard as dashboard admin.
# Dashboard admin will have the access to all the workspaces(workspace.enabled: true) and objects inside OpenSearch Dashboards.
# opensearchDashboards.dashboardAdmin.groups: ["dashboard_admin"]
# opensearchDashboards.dashboardAdmin.users: ["dashboard_admin"]
# opensearchDashboards.dashboardAdmin.users: ["dashboard_admin"]
17 changes: 17 additions & 0 deletions release-notes/opensearch-dashboards.release-notes-1.3.17.md
@@ -0,0 +1,17 @@
# Version 1.3.17 Release Notes

### 🛡 Security

### 📈 Features/Enhancements

### 🐛 Bug Fixes

- Replace control characters before logging ([#6590](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6590))

### 🚞 Infrastructure

### 📝 Documentation

### 🛠 Maintenance

- [Version] Increment version to 1.3.17 ([#6845](https://github.com/opensearch-project/OpenSearch-Dashboards/pull/6845))
2 changes: 2 additions & 0 deletions src/core/server/index.ts
@@ -328,6 +328,8 @@ export {
SavedObjectsDeleteByWorkspaceOptions,
updateDataSourceNameInVegaSpec,
extractVegaSpecFromSavedObject,
extractTimelineExpression,
updateDataSourceNameInTimeline,
} from './saved_objects';

export {
@@ -28,7 +28,10 @@
* under the License.
*/

import { exportSavedObjectsToStream } from './get_sorted_objects_for_export';
import {
DATA_SOURCE_CREDENTIALS_PLACEHOLDER,
exportSavedObjectsToStream,
} from './get_sorted_objects_for_export';
import { savedObjectsClientMock } from '../service/saved_objects_client.mock';
import { Readable } from 'stream';
import { createPromiseFromStreams, createConcatStream } from '../../utils/streams';
@@ -706,6 +709,50 @@ describe('getSortedObjectsForExport()', () => {
]);
});

test('modifies return results to update `credentials` of data-source to use placeholder', async () => {
const createDataSourceSavedObject = (id: string, auth: any) => ({
id,
type: 'data-source',
attributes: { auth },
references: [],
});

const dataSourceNoAuthInfo = { type: 'no_auth' };
const dataSourceBasicAuthInfo = {
type: 'username_password',
credentials: { username: 'foo', password: 'bar' },
};

const redactedDataSourceBasicAuthInfo = {
type: 'username_password',
credentials: {
username: DATA_SOURCE_CREDENTIALS_PLACEHOLDER,
password: DATA_SOURCE_CREDENTIALS_PLACEHOLDER,
},
};

savedObjectsClient.bulkGet.mockResolvedValueOnce({
saved_objects: [
createDataSourceSavedObject('1', dataSourceNoAuthInfo),
createDataSourceSavedObject('2', dataSourceBasicAuthInfo),
],
});
const exportStream = await exportSavedObjectsToStream({
exportSizeLimit: 10000,
savedObjectsClient,
objects: [
{ type: 'data-source', id: '1' },
{ type: 'data-source', id: '2' },
],
});
const response = await readStreamToCompletion(exportStream);
expect(response).toEqual([
createDataSourceSavedObject('1', dataSourceNoAuthInfo),
createDataSourceSavedObject('2', redactedDataSourceBasicAuthInfo),
expect.objectContaining({ exportedCount: 2 }),
]);
});

test('includes nested dependencies when passed in', async () => {
savedObjectsClient.bulkGet.mockResolvedValueOnce({
saved_objects: [
@@ -34,6 +34,8 @@ import { SavedObjectsClientContract, SavedObject, SavedObjectsBaseOptions } from
import { fetchNestedDependencies } from './inject_nested_depdendencies';
import { sortObjects } from './sort_objects';

export const DATA_SOURCE_CREDENTIALS_PLACEHOLDER = 'pleaseUpdateCredentials';

/**
* Options controlling the export operation.
* @public
@@ -185,10 +187,40 @@ export async function exportSavedObjectsToStream({
({ namespaces, ...object }) => object
);

// update the credential fields from "data-source" saved object to use placeholder to avoid exporting sensitive information
const redactedObjectsWithoutCredentials = redactedObjects.map<SavedObject<unknown>>((object) => {
if (object.type === 'data-source') {
const { auth, ...rest } = object.attributes as {
auth: { type: string; credentials?: any };
};
const hasCredentials = auth && auth.credentials;
const updatedCredentials = hasCredentials
? Object.keys(auth.credentials).reduce((acc, key) => {
acc[key] = DATA_SOURCE_CREDENTIALS_PLACEHOLDER;
return acc;
}, {} as { [key: string]: any })
: undefined;
return {
...object,
attributes: {
...rest,
auth: {
type: auth.type,
...(hasCredentials && { credentials: updatedCredentials }),
},
},
};
}
return object;
});

const exportDetails: SavedObjectsExportResultDetails = {
exportedCount: exportedObjects.length,
missingRefCount: missingReferences.length,
missingReferences,
};
return createListStream([...redactedObjects, ...(excludeExportDetails ? [] : [exportDetails])]);
return createListStream([
...redactedObjectsWithoutCredentials,
...(excludeExportDetails ? [] : [exportDetails]),
]);
}
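
For reference, a minimal standalone sketch of the placeholder substitution the hunk above performs during export. The DataSourceAuth interface, the redactDataSourceAuth helper, and the sample credentials are illustrative assumptions; only the placeholder value and the key-preserving reduce mirror the committed change.

const DATA_SOURCE_CREDENTIALS_PLACEHOLDER = 'pleaseUpdateCredentials';

interface DataSourceAuth {
  type: string;
  credentials?: Record<string, string>;
}

// Replace every credential value with the placeholder while keeping the keys,
// so an exported data-source never carries real secrets.
function redactDataSourceAuth(auth: DataSourceAuth): DataSourceAuth {
  if (!auth.credentials) {
    // No credentials block (e.g. no_auth): keep only the auth type.
    return { type: auth.type };
  }
  const redactedCredentials = Object.keys(auth.credentials).reduce<Record<string, string>>(
    (acc, key) => {
      acc[key] = DATA_SOURCE_CREDENTIALS_PLACEHOLDER;
      return acc;
    },
    {}
  );
  return { type: auth.type, credentials: redactedCredentials };
}

// Example: a username/password data source keeps its credential keys but loses the values.
console.log(
  redactDataSourceAuth({
    type: 'username_password',
    credentials: { username: 'foo', password: 'bar' },
  })
);
// -> { type: 'username_password',
//      credentials: { username: 'pleaseUpdateCredentials', password: 'pleaseUpdateCredentials' } }
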
7 changes: 6 additions & 1 deletion src/core/server/saved_objects/import/index.ts
@@ -43,4 +43,9 @@ export {
SavedObjectsResolveImportErrorsOptions,
SavedObjectsImportRetry,
} from './types';
export { updateDataSourceNameInVegaSpec, extractVegaSpecFromSavedObject } from './utils';
export {
updateDataSourceNameInVegaSpec,
extractVegaSpecFromSavedObject,
extractTimelineExpression,
updateDataSourceNameInTimeline,
} from './utils';
33 changes: 33 additions & 0 deletions src/core/server/saved_objects/import/utils.test.ts
@@ -9,6 +9,7 @@ import {
getDataSourceTitleFromId,
getUpdatedTSVBVisState,
updateDataSourceNameInVegaSpec,
updateDataSourceNameInTimeline,
} from './utils';
import { parse } from 'hjson';
import { isEqual } from 'lodash';
@@ -199,6 +200,38 @@ describe('updateDataSourceNameInVegaSpec()', () => {
});
});

describe('updateDataSourceNameInTimeline()', () => {
test('When a timeline expression does not contain a data source name, modify the expression', () => {
const expression =
'.opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp).lines(show=true).points(show=true).yaxis(label="Average bytes")';
const expectedExpression =
'.opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp, data_source_name="newDataSource").lines(show=true).points(show=true).yaxis(label="Average bytes")';
expect(updateDataSourceNameInTimeline(expression, 'newDataSource')).toBe(expectedExpression);
});

test('When a timeline expression contains a data source name, then do nothing', () => {
const expression =
'.opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp, data_source_name=newDataSource).lines(show=true).points(show=true).yaxis(label="Average bytes")';
expect(updateDataSourceNameInTimeline(expression, 'newDataSource')).toBe(expression);
});

test('When a timeline expression contains multiple timeline expression, modify each of them', () => {
const expression =
'.opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp,data_source_name=aos211).lines(show=true).points(show=true).yaxis(label="Average bytes"),.opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp).lines(show=true).points(show=true).yaxis(label="Average bytes")';
const expectedExpression =
'.opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp,data_source_name=aos211).lines(show=true).points(show=true).yaxis(label="Average bytes"),.opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp, data_source_name="aos211").lines(show=true).points(show=true).yaxis(label="Average bytes")';
expect(updateDataSourceNameInTimeline(expression, 'aos211')).toBe(expectedExpression);
});

test('When a timeline expression contains multiple timeline expression and the datasource name contains space, we modify each of them', () => {
const expression =
'.es(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp).lines(show=true).points(show=true).yaxis(label="Average bytes"),.elasticsearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp).lines(show=true).points(show=true).yaxis(label="Average bytes")';
const expectedExpression =
'.es(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp, data_source_name="aos 211").lines(show=true).points(show=true).yaxis(label="Average bytes"),.elasticsearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp, data_source_name="aos 211").lines(show=true).points(show=true).yaxis(label="Average bytes")';
expect(updateDataSourceNameInTimeline(expression, 'aos 211')).toBe(expectedExpression);
});
});

describe('extractVegaSpecFromSavedObject()', () => {
test('For a Vega visualization saved object, return its spec', () => {
const spec = 'some-vega-spec';
39 changes: 36 additions & 3 deletions src/core/server/saved_objects/import/utils.ts
@@ -91,6 +91,25 @@ export const updateDataSourceNameInVegaSpec = (
});
};

export const updateDataSourceNameInTimeline = (
timelineExpression: string,
dataSourceTitle: string
) => {
const expressionRegex = /\.(opensearch|es|elasticsearch)\(([^)]*)\)/g;

const replaceCallback = (match: string, funcName: string, args: string) => {
if (!args.includes('data_source_name')) {
let expressionArgs = args.trim();
expressionArgs = `${expressionArgs}, data_source_name="${dataSourceTitle}"`;
return `.${funcName}(${expressionArgs})`;
}
return match;
};

const modifiedExpression = timelineExpression.replace(expressionRegex, replaceCallback);
return modifiedExpression;
};

export const getDataSourceTitleFromId = async (
dataSourceId: string,
savedObjectsClient: SavedObjectsClientContract
@@ -102,7 +121,7 @@ export const getDataSourceTitleFromId = async (
};

export const extractVegaSpecFromSavedObject = (savedObject: SavedObject) => {
if (isVegaVisualization(savedObject)) {
if (confirmVisualizationType(savedObject, 'vega')) {
// @ts-expect-error
const visStateObject = JSON.parse(savedObject.attributes?.visState);
return visStateObject.params.spec;
@@ -111,12 +130,26 @@ export const extractVegaSpecFromSavedObject = (savedObject: SavedObject) => {
return undefined;
};

const isVegaVisualization = (savedObject: SavedObject) => {
export const extractTimelineExpression = (savedObject: SavedObject) => {
if (!confirmVisualizationType(savedObject, 'timelion')) {
return undefined;
}
// @ts-expect-error
const visStateString = savedObject.attributes?.visState;
if (!visStateString) {
return undefined;
}

const visStateObject = JSON.parse(visStateString);
return visStateObject.params.expression;
};

const confirmVisualizationType = (savedObject: SavedObject, visualizationType: string) => {
// @ts-expect-error
const visState = savedObject.attributes?.visState;
if (!!visState) {
const visStateObject = JSON.parse(visState);
return !!visStateObject.type && visStateObject.type === 'vega';
return !!visStateObject.type && visStateObject.type === visualizationType;
}
return false;
};
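
A rough usage sketch for the two Timeline helpers above, assuming they are imported from this utils module. The sample saved object, its id, and the 'myDataSource' title are illustrative only; in the commit these helpers are wired into the saved-object import flow rather than called standalone like this.

import { extractTimelineExpression, updateDataSourceNameInTimeline } from './utils';

// A Timeline (timelion) visualization saved object, trimmed to the fields the helpers read.
const timelineVisualization: any = {
  id: 'sample-timeline-vis',
  type: 'visualization',
  attributes: {
    visState: JSON.stringify({
      type: 'timelion',
      params: {
        expression:
          '.opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp)',
      },
    }),
  },
  references: [],
};

// Returns the expression only when visState.type is 'timelion'; otherwise undefined.
const expression = extractTimelineExpression(timelineVisualization);

if (expression) {
  // Appends data_source_name="myDataSource" to each .opensearch()/.es()/.elasticsearch()
  // call that does not already name a data source.
  const updated = updateDataSourceNameInTimeline(expression, 'myDataSource');
  console.log(updated);
  // .opensearch(opensearch_dashboards_sample_data_logs, metric=avg:bytes, timefield=@timestamp, data_source_name="myDataSource")
}
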
16 changes: 0 additions & 16 deletions src/plugins/data_source/server/data_source_service.mock.ts

This file was deleted.

24 changes: 11 additions & 13 deletions src/plugins/data_source/server/plugin.ts
@@ -61,26 +61,16 @@ export class DataSourcePlugin implements Plugin<DataSourcePluginSetup, DataSourc
const cryptographyServiceSetup: CryptographyServiceSetup = this.cryptographyService.setup(
config
);
const dataSourceServiceSetup: DataSourceServiceSetup = await this.dataSourceService.setup(
config
);

const authRegistryPromise = core.getStartServices().then(([, , selfStart]) => {
const dataSourcePluginStart = selfStart as DataSourcePluginStart;
return dataSourcePluginStart.getAuthenticationMethodRegistry();
});
const auditTrailPromise = core.getStartServices().then(([coreStart]) => coreStart.auditTrail);
const customApiSchemaRegistryPromise = core.getStartServices().then(([, , selfStart]) => {
const dataSourcePluginStart = selfStart as DataSourcePluginStart;
return dataSourcePluginStart.getCustomApiSchemaRegistry();
});

const dataSourceSavedObjectsClientWrapper = new DataSourceSavedObjectsClientWrapper(
dataSourceServiceSetup,
cryptographyServiceSetup,
this.logger.get('data-source-saved-objects-client-wrapper-factory'),
authRegistryPromise,
customApiSchemaRegistryPromise,
config.endpointDeniedIPs
);

@@ -114,12 +104,20 @@ export class DataSourcePlugin implements Plugin<DataSourcePluginSetup, DataSourc
},
};
core.auditTrail.register(auditorFactory);
const auditTrailPromise = core.getStartServices().then(([coreStart]) => coreStart.auditTrail);

const dataSourceService: DataSourceServiceSetup = await this.dataSourceService.setup(config);

const customApiSchemaRegistryPromise = core.getStartServices().then(([, , selfStart]) => {
const dataSourcePluginStart = selfStart as DataSourcePluginStart;
return dataSourcePluginStart.getCustomApiSchemaRegistry();
});

// Register data source plugin context to route handler context
core.http.registerRouteHandlerContext(
'dataSource',
this.createDataSourceRouteHandlerContext(
dataSourceServiceSetup,
dataSourceService,
cryptographyServiceSetup,
this.logger,
auditTrailPromise,
Expand All @@ -131,14 +129,14 @@ export class DataSourcePlugin implements Plugin<DataSourcePluginSetup, DataSourc
const router = core.http.createRouter();
registerTestConnectionRoute(
router,
dataSourceServiceSetup,
dataSourceService,
cryptographyServiceSetup,
authRegistryPromise,
customApiSchemaRegistryPromise
);
registerFetchDataSourceMetaDataRoute(
router,
dataSourceServiceSetup,
dataSourceService,
cryptographyServiceSetup,
authRegistryPromise,
customApiSchemaRegistryPromise