- {t('File Settings')}
+ {t('File settings')}
{t(
'Adjust how spaces, blank lines, null values are handled and other file wide settings.',
@@ -767,7 +777,7 @@ const UploadDataModal: FunctionComponent = ({
@@ -783,7 +793,7 @@ const UploadDataModal: FunctionComponent = ({
@@ -890,7 +900,7 @@ const UploadDataModal: FunctionComponent = ({
{t('Chosen non-numeric column')}
) : (
-
+
setFilterActive(true)}
onClosePopover={() => {
diff --git a/superset-frontend/src/filters/components/TimeColumn/TimeColumnFilterPlugin.tsx b/superset-frontend/src/filters/components/TimeColumn/TimeColumnFilterPlugin.tsx
index 805ecc4d0f765..247c19e4c4711 100644
--- a/superset-frontend/src/filters/components/TimeColumn/TimeColumnFilterPlugin.tsx
+++ b/superset-frontend/src/filters/components/TimeColumn/TimeColumnFilterPlugin.tsx
@@ -111,6 +111,7 @@ export default function PluginFilterTimeColumn(
{...formItemData}
>
diff --git a/superset-frontend/src/setup/setupFormatters.ts b/superset-frontend/src/setup/setupFormatters.ts
index 384b1be9e30ae..b8b181fb7f1ae 100644
--- a/superset-frontend/src/setup/setupFormatters.ts
+++ b/superset-frontend/src/setup/setupFormatters.ts
@@ -78,6 +78,10 @@ export default function setupFormatters(
'DURATION_SUB',
createDurationFormatter({ formatSubMilliseconds: true }),
)
+ .registerValue(
+ 'DURATION_COL',
+ createDurationFormatter({ colonNotation: true }),
+ )
.registerValue('MEMORY_DECIMAL', createMemoryFormatter({ binary: false }))
.registerValue('MEMORY_BINARY', createMemoryFormatter({ binary: true }));
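For reference, `colonNotation` is a pretty-ms option, and Superset's `createDurationFormatter` forwards its options to pretty-ms; the sketch below shows what the newly registered `DURATION_COL` formatter would emit, assuming that forwarding behavior and the registration above.

```ts
// Minimal sketch of the new DURATION_COL formatter's output, assuming
// createDurationFormatter forwards { colonNotation: true } to pretty-ms.
import { getNumberFormatter } from '@superset-ui/core';

const durationCol = getNumberFormatter('DURATION_COL');

// pretty-ms renders clock-style durations under colonNotation:
// 95500 ms -> '1:35.5' (instead of '1m 35.5s' from the plain formatter)
console.log(durationCol(95500));
```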
diff --git a/superset-frontend/src/theme/index.ts b/superset-frontend/src/theme/index.ts
index 857e95888f935..6a2654b55fdd9 100644
--- a/superset-frontend/src/theme/index.ts
+++ b/superset-frontend/src/theme/index.ts
@@ -29,6 +29,9 @@ const themes = {
[ThemeType.LIGHT]: lightAlgorithm,
};
+// Want to figure out which tokens look like what? Try this!
+// https://ant.design/theme-editor
+
const baseConfig: ThemeConfig = {
token: {
borderRadius: supersetTheme.borderRadius,
@@ -56,7 +59,6 @@ const baseConfig: ThemeConfig = {
paddingXS: supersetTheme.gridUnit * 2,
},
Card: {
- colorBgContainer: supersetTheme.colors.grayscale.light4,
paddingLG: supersetTheme.gridUnit * 6,
fontWeightStrong: supersetTheme.typography.weights.medium,
},
@@ -75,6 +77,10 @@ const baseConfig: ThemeConfig = {
handleSizeHover: 10,
handleLineWidthHover: 2,
},
+ Switch: {
+ colorPrimaryHover: supersetTheme.colors.primary.base,
+ colorTextTertiary: supersetTheme.colors.grayscale.light1,
+ },
},
};
diff --git a/superset-frontend/src/types/dom-to-pdf.d.ts b/superset-frontend/src/types/dom-to-pdf.d.ts
new file mode 100644
index 0000000000000..061e80d96cede
--- /dev/null
+++ b/superset-frontend/src/types/dom-to-pdf.d.ts
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+declare module 'dom-to-pdf' {
+ interface Image {
+ type: string;
+ quality: number;
+ }
+
+ interface Options {
+ margin: number;
+ filename: string;
+ image: Image;
+ html2canvas: object;
+ excludeClassNames?: string[];
+ }
+
+ function domToPdf(elementToPrint: Element, options?: Options): Promise<void>;
+
+ export default domToPdf;
+}
diff --git a/superset-frontend/src/utils/cacheWrapper.test.ts b/superset-frontend/src/utils/cacheWrapper.test.ts
index f53d925e55d83..2d39a3b85dfd4 100644
--- a/superset-frontend/src/utils/cacheWrapper.test.ts
+++ b/superset-frontend/src/utils/cacheWrapper.test.ts
@@ -37,8 +37,8 @@ describe('cacheWrapper', () => {
const returnedValue = wrappedFn(1, 2);
expect(returnedValue).toEqual(fnResult);
- expect(fn).toBeCalledTimes(1);
- expect(fn).toBeCalledWith(1, 2);
+ expect(fn).toHaveBeenCalledTimes(1);
+ expect(fn).toHaveBeenCalledWith(1, 2);
});
describe('subsequent calls', () => {
@@ -48,14 +48,14 @@ describe('cacheWrapper', () => {
expect(returnedValue1).toEqual(fnResult);
expect(returnedValue2).toEqual(fnResult);
- expect(fn).toBeCalledTimes(1);
+ expect(fn).toHaveBeenCalledTimes(1);
});
it('fn is called multiple times for different arguments', () => {
wrappedFn(1, 2);
wrappedFn(1, 3);
- expect(fn).toBeCalledTimes(2);
+ expect(fn).toHaveBeenCalledTimes(2);
});
});
@@ -77,7 +77,7 @@ describe('cacheWrapper', () => {
wrappedFn(1, 2);
wrappedFn(1, 3);
- expect(fn).toBeCalledTimes(1);
+ expect(fn).toHaveBeenCalledTimes(1);
});
});
});
diff --git a/superset-frontend/src/utils/downloadAsPdf.ts b/superset-frontend/src/utils/downloadAsPdf.ts
new file mode 100644
index 0000000000000..bb769d1eb117f
--- /dev/null
+++ b/superset-frontend/src/utils/downloadAsPdf.ts
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+import { SyntheticEvent } from 'react';
+import domToPdf from 'dom-to-pdf';
+import { kebabCase } from 'lodash';
+import { logging, t } from '@superset-ui/core';
+import { addWarningToast } from 'src/components/MessageToasts/actions';
+
+/**
+ * generate a consistent file stem from a description and date
+ *
+ * @param description title or description of content of file
+ * @param date date when file was generated
+ */
+const generateFileStem = (description: string, date = new Date()) =>
+ `${kebabCase(description)}-${date.toISOString().replace(/[: ]/g, '-')}`;
+
+/**
+ * Create an event handler for turning an element into a PDF
+ *
+ * @param selector css selector of the parent element which should be turned into a PDF
+ * @param description name or a short description of what is being printed.
+ * Value will be normalized, and a date as well as a file extension will be added.
+ * @param isExactSelector if false, searches for the closest ancestor that matches selector.
+ * @returns event handler
+ */
+export default function downloadAsPdf(
+ selector: string,
+ description: string,
+ isExactSelector = false,
+) {
+ return (event: SyntheticEvent) => {
+ const elementToPrint = isExactSelector
+ ? document.querySelector(selector)
+ : event.currentTarget.closest(selector);
+
+ if (!elementToPrint) {
+ return addWarningToast(
+ t('PDF download failed, please refresh and try again.'),
+ );
+ }
+
+ const options = {
+ margin: 10,
+ filename: `${generateFileStem(description)}.pdf`,
+ image: { type: 'jpeg', quality: 1 },
+ html2canvas: { scale: 2 },
+ excludeClassNames: ['header-controls'],
+ };
+ return domToPdf(elementToPrint, options)
+ .then(() => {
+ // nothing to be done
+ })
+ .catch((e: Error) => {
+ logging.error('PDF generation failed', e);
+ });
+ };
+}
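A hypothetical call site for the helper above; only the import path comes from the diff, while the selector, description, and menu wiring are illustrative.

```ts
// Hypothetical usage of downloadAsPdf; '.dashboard' and the menu item
// are illustrative, not taken from the diff.
import downloadAsPdf from 'src/utils/downloadAsPdf';

const exportDashboard = downloadAsPdf('.dashboard', 'Sales overview', true);

// Attach to any clickable element; the handler resolves the target node,
// then writes e.g. 'sales-overview-2024-11-05T12-00-00.000Z.pdf'.
// <Menu.Item onClick={exportDashboard}>{t('Export to PDF')}</Menu.Item>
```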
diff --git a/superset-frontend/src/utils/urlUtils.ts b/superset-frontend/src/utils/urlUtils.ts
index 2858d65a7de1c..49fab2dd1f109 100644
--- a/superset-frontend/src/utils/urlUtils.ts
+++ b/superset-frontend/src/utils/urlUtils.ts
@@ -123,7 +123,7 @@ function getChartUrlParams(excludedUrlParams?: string[]): UrlParamEntries {
return getUrlParamEntries(urlParams);
}
-function getDashboardUrlParams(): UrlParamEntries {
+export function getDashboardUrlParams(): UrlParamEntries {
const urlParams = getUrlParams(RESERVED_DASHBOARD_URL_PARAMS);
const filterBoxFilters = getActiveFilters();
if (!isEmpty(filterBoxFilters))
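Presumably `getDashboardUrlParams` is exported so other modules (such as the new PDF export flow) can reuse the dashboard's URL params; a hedged sketch of a consumer, where the share-URL construction is illustrative:

```ts
// Hypothetical consumer of the newly exported helper; the import path is
// from the diff, the share-URL construction is an assumption.
import { getDashboardUrlParams } from 'src/utils/urlUtils';

const entries = getDashboardUrlParams(); // UrlParamEntries, i.e. [key, value][]
const search = new URLSearchParams(entries).toString();
const shareUrl = `${window.location.origin}${window.location.pathname}?${search}`;
```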
diff --git a/superset-websocket/package-lock.json b/superset-websocket/package-lock.json
index bf54fba8bf9b6..2312683319fa8 100644
--- a/superset-websocket/package-lock.json
+++ b/superset-websocket/package-lock.json
@@ -9,13 +9,13 @@
"version": "0.0.1",
"license": "Apache-2.0",
"dependencies": {
- "cookie": "^0.6.0",
+ "cookie": "^0.7.0",
"hot-shots": "^10.0.0",
"ioredis": "^4.28.0",
"jsonwebtoken": "^9.0.2",
"lodash": "^4.17.21",
- "uuid": "^10.0.0",
- "winston": "^3.13.0",
+ "uuid": "^11.0.2",
+ "winston": "^3.15.0",
"ws": "^8.18.0"
},
"devDependencies": {
@@ -31,7 +31,7 @@
"@types/ws": "^8.5.12",
"@typescript-eslint/eslint-plugin": "^8.8.0",
"@typescript-eslint/parser": "^8.6.0",
- "eslint": "^9.11.0",
+ "eslint": "^9.14.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-lodash": "^8.0.0",
"globals": "^15.9.0",
@@ -40,7 +40,7 @@
"ts-jest": "^29.2.5",
"ts-node": "^10.9.2",
"typescript": "^5.6.2",
- "typescript-eslint": "^8.8.0"
+ "typescript-eslint": "^8.12.2"
},
"engines": {
"node": "^16.9.1",
@@ -737,11 +737,10 @@
}
},
"node_modules/@eslint-community/regexpp": {
- "version": "4.11.1",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.1.tgz",
- "integrity": "sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==",
+ "version": "4.12.1",
+ "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
+ "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
"dev": true,
- "license": "MIT",
"engines": {
"node": "^12.0.0 || ^14.0.0 || >=16.0.0"
}
@@ -761,6 +760,15 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
+ "node_modules/@eslint/core": {
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.7.0.tgz",
+ "integrity": "sha512-xp5Jirz5DyPYlPiKat8jaq0EmYvDXKKpzTbxXMpT9eqlRJkRKIz9AGMdlvYjih+im+QlhWrpvVjl8IPC/lHlUw==",
+ "dev": true,
+ "engines": {
+ "node": "^18.18.0 || ^20.9.0 || >=21.1.0"
+ }
+ },
"node_modules/@eslint/eslintrc": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.1.0.tgz",
@@ -819,11 +827,10 @@
}
},
"node_modules/@eslint/js": {
- "version": "9.11.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.11.0.tgz",
- "integrity": "sha512-LPkkenkDqyzTFauZLLAPhIb48fj6drrfMvRGSL9tS3AcZBSVTllemLSNyCvHNNL2t797S/6DJNSIwRwXgMO/eQ==",
+ "version": "9.14.0",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.14.0.tgz",
+ "integrity": "sha512-pFoEtFWCPyDOl+C6Ift+wC7Ro89otjigCf5vcuWqWgqNSQbRrpjSvdeE6ofLz4dHmyxD5f7gIdGT4+p36L6Twg==",
"dev": true,
- "license": "MIT",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
@@ -851,6 +858,41 @@
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
}
},
+ "node_modules/@humanfs/core": {
+ "version": "0.19.1",
+ "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz",
+ "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==",
+ "dev": true,
+ "engines": {
+ "node": ">=18.18.0"
+ }
+ },
+ "node_modules/@humanfs/node": {
+ "version": "0.16.6",
+ "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz",
+ "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==",
+ "dev": true,
+ "dependencies": {
+ "@humanfs/core": "^0.19.1",
+ "@humanwhocodes/retry": "^0.3.0"
+ },
+ "engines": {
+ "node": ">=18.18.0"
+ }
+ },
+ "node_modules/@humanfs/node/node_modules/@humanwhocodes/retry": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz",
+ "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==",
+ "dev": true,
+ "engines": {
+ "node": ">=18.18"
+ },
+ "funding": {
+ "type": "github",
+ "url": "https://github.com/sponsors/nzakas"
+ }
+ },
"node_modules/@humanwhocodes/module-importer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
@@ -865,11 +907,10 @@
}
},
"node_modules/@humanwhocodes/retry": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.0.tgz",
- "integrity": "sha512-d2CGZR2o7fS6sWB7DG/3a95bGKQyHMACZ5aW8qGkkqQpUoZV6C0X7Pc7l4ZNMZkfNBf4VWNe9E1jRsf0G146Ew==",
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.0.tgz",
+ "integrity": "sha512-xnRgu9DxZbkWak/te3fcytNyp8MTbuiZIaueg2rgEvBuN55n04nwLYLU9TX/VVlusc9L2ZNXi99nUFNkHXtr5g==",
"dev": true,
- "license": "Apache-2.0",
"engines": {
"node": ">=18.18"
},
@@ -1817,16 +1858,16 @@
"dev": true
},
"node_modules/@typescript-eslint/eslint-plugin": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.8.0.tgz",
- "integrity": "sha512-wORFWjU30B2WJ/aXBfOm1LX9v9nyt9D3jsSOxC3cCaTQGCW5k4jNpmjFv3U7p/7s4yvdjHzwtv2Sd2dOyhjS0A==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.12.2.tgz",
+ "integrity": "sha512-gQxbxM8mcxBwaEmWdtLCIGLfixBMHhQjBqR8sVWNTPpcj45WlYL2IObS/DNMLH1DBP0n8qz+aiiLTGfopPEebw==",
"dev": true,
"dependencies": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.8.0",
- "@typescript-eslint/type-utils": "8.8.0",
- "@typescript-eslint/utils": "8.8.0",
- "@typescript-eslint/visitor-keys": "8.8.0",
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/type-utils": "8.12.2",
+ "@typescript-eslint/utils": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -1850,15 +1891,15 @@
}
},
"node_modules/@typescript-eslint/parser": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.8.0.tgz",
- "integrity": "sha512-uEFUsgR+tl8GmzmLjRqz+VrDv4eoaMqMXW7ruXfgThaAShO9JTciKpEsB+TvnfFfbg5IpujgMXVV36gOJRLtZg==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.12.2.tgz",
+ "integrity": "sha512-MrvlXNfGPLH3Z+r7Tk+Z5moZAc0dzdVjTgUgwsdGweH7lydysQsnSww3nAmsq8blFuRD5VRlAr9YdEFw3e6PBw==",
"dev": true,
"dependencies": {
- "@typescript-eslint/scope-manager": "8.8.0",
- "@typescript-eslint/types": "8.8.0",
- "@typescript-eslint/typescript-estree": "8.8.0",
- "@typescript-eslint/visitor-keys": "8.8.0",
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/typescript-estree": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"debug": "^4.3.4"
},
"engines": {
@@ -1878,13 +1919,13 @@
}
},
"node_modules/@typescript-eslint/scope-manager": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.0.tgz",
- "integrity": "sha512-EL8eaGC6gx3jDd8GwEFEV091210U97J0jeEHrAYvIYosmEGet4wJ+g0SYmLu+oRiAwbSA5AVrt6DxLHfdd+bUg==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz",
+ "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==",
"dev": true,
"dependencies": {
- "@typescript-eslint/types": "8.8.0",
- "@typescript-eslint/visitor-keys": "8.8.0"
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1895,13 +1936,13 @@
}
},
"node_modules/@typescript-eslint/type-utils": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.8.0.tgz",
- "integrity": "sha512-IKwJSS7bCqyCeG4NVGxnOP6lLT9Okc3Zj8hLO96bpMkJab+10HIfJbMouLrlpyOr3yrQ1cA413YPFiGd1mW9/Q==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.12.2.tgz",
+ "integrity": "sha512-bwuU4TAogPI+1q/IJSKuD4shBLc/d2vGcRT588q+jzayQyjVK2X6v/fbR4InY2U2sgf8MEvVCqEWUzYzgBNcGQ==",
"dev": true,
"dependencies": {
- "@typescript-eslint/typescript-estree": "8.8.0",
- "@typescript-eslint/utils": "8.8.0",
+ "@typescript-eslint/typescript-estree": "8.12.2",
+ "@typescript-eslint/utils": "8.12.2",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
},
@@ -1919,9 +1960,9 @@
}
},
"node_modules/@typescript-eslint/types": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz",
- "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
+ "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
"dev": true,
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -1932,13 +1973,13 @@
}
},
"node_modules/@typescript-eslint/typescript-estree": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.0.tgz",
- "integrity": "sha512-ZaMJwc/0ckLz5DaAZ+pNLmHv8AMVGtfWxZe/x2JVEkD5LnmhWiQMMcYT7IY7gkdJuzJ9P14fRy28lUrlDSWYdw==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz",
+ "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==",
"dev": true,
"dependencies": {
- "@typescript-eslint/types": "8.8.0",
- "@typescript-eslint/visitor-keys": "8.8.0",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"debug": "^4.3.4",
"fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
@@ -1984,15 +2025,15 @@
}
},
"node_modules/@typescript-eslint/utils": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.8.0.tgz",
- "integrity": "sha512-QE2MgfOTem00qrlPgyByaCHay9yb1+9BjnMFnSFkUKQfu7adBXDTnCAivURnuPPAG/qiB+kzKkZKmKfaMT0zVg==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz",
+ "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==",
"dev": true,
"dependencies": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.8.0",
- "@typescript-eslint/types": "8.8.0",
- "@typescript-eslint/typescript-estree": "8.8.0"
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/typescript-estree": "8.12.2"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2006,12 +2047,12 @@
}
},
"node_modules/@typescript-eslint/visitor-keys": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz",
- "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
+ "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
"dev": true,
"dependencies": {
- "@typescript-eslint/types": "8.8.0",
+ "@typescript-eslint/types": "8.12.2",
"eslint-visitor-keys": "^3.4.3"
},
"engines": {
@@ -2023,11 +2064,10 @@
}
},
"node_modules/acorn": {
- "version": "8.12.1",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz",
- "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
+ "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
"dev": true,
- "license": "MIT",
"bin": {
"acorn": "bin/acorn"
},
@@ -2482,9 +2522,9 @@
"dev": true
},
"node_modules/cookie": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
- "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.0.tgz",
+ "integrity": "sha512-qCf+V4dtlNhSRXGAZatc1TasyFO6GjohcOul807YOb5ik3+kQSnb4d7iajeCL8QHaJ4uZEjCgiCJerKXwdRVlQ==",
"engines": {
"node": ">= 0.6"
}
@@ -2681,29 +2721,31 @@
}
},
"node_modules/eslint": {
- "version": "9.11.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.11.0.tgz",
- "integrity": "sha512-yVS6XODx+tMFMDFcG4+Hlh+qG7RM6cCJXtQhCKLSsr3XkLvWggHjCqjfh0XsPPnt1c56oaT6PMgW9XWQQjdHXA==",
+ "version": "9.14.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.14.0.tgz",
+ "integrity": "sha512-c2FHsVBr87lnUtjP4Yhvk4yEhKrQavGafRA/Se1ouse8PfbfC/Qh9Mxa00yWsZRlqeUB9raXip0aiiUZkgnr9g==",
"dev": true,
- "license": "MIT",
"dependencies": {
"@eslint-community/eslint-utils": "^4.2.0",
- "@eslint-community/regexpp": "^4.11.0",
+ "@eslint-community/regexpp": "^4.12.1",
"@eslint/config-array": "^0.18.0",
+ "@eslint/core": "^0.7.0",
"@eslint/eslintrc": "^3.1.0",
- "@eslint/js": "9.11.0",
+ "@eslint/js": "9.14.0",
"@eslint/plugin-kit": "^0.2.0",
+ "@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
- "@humanwhocodes/retry": "^0.3.0",
- "@nodelib/fs.walk": "^1.2.8",
+ "@humanwhocodes/retry": "^0.4.0",
+ "@types/estree": "^1.0.6",
+ "@types/json-schema": "^7.0.15",
"ajv": "^6.12.4",
"chalk": "^4.0.0",
"cross-spawn": "^7.0.2",
"debug": "^4.3.2",
"escape-string-regexp": "^4.0.0",
- "eslint-scope": "^8.0.2",
- "eslint-visitor-keys": "^4.0.0",
- "espree": "^10.1.0",
+ "eslint-scope": "^8.2.0",
+ "eslint-visitor-keys": "^4.2.0",
+ "espree": "^10.3.0",
"esquery": "^1.5.0",
"esutils": "^2.0.2",
"fast-deep-equal": "^3.1.3",
@@ -2713,13 +2755,11 @@
"ignore": "^5.2.0",
"imurmurhash": "^0.1.4",
"is-glob": "^4.0.0",
- "is-path-inside": "^3.0.3",
"json-stable-stringify-without-jsonify": "^1.0.1",
"lodash.merge": "^4.6.2",
"minimatch": "^3.1.2",
"natural-compare": "^1.4.0",
"optionator": "^0.9.3",
- "strip-ansi": "^6.0.1",
"text-table": "^0.2.0"
},
"bin": {
@@ -2769,11 +2809,10 @@
}
},
"node_modules/eslint-scope": {
- "version": "8.0.2",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.0.2.tgz",
- "integrity": "sha512-6E4xmrTw5wtxnLA5wYL3WDfhZ/1bUBGOXV0zQvVRDOtrR8D0p6W7fs3JweNYhwRYeGvd/1CKX2se0/2s7Q/nJA==",
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz",
+ "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==",
"dev": true,
- "license": "BSD-2-Clause",
"dependencies": {
"esrecurse": "^4.3.0",
"estraverse": "^5.2.0"
@@ -2810,11 +2849,10 @@
}
},
"node_modules/eslint/node_modules/eslint-visitor-keys": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.0.0.tgz",
- "integrity": "sha512-OtIRv/2GyiF6o/d8K7MYKKbXrOUBIK6SfkIRM4Z0dY3w+LiQ0vy3F57m0Z71bjbyeiWFiHJ8brqnmE6H6/jEuw==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true,
- "license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -2913,15 +2951,14 @@
}
},
"node_modules/espree": {
- "version": "10.1.0",
- "resolved": "https://registry.npmjs.org/espree/-/espree-10.1.0.tgz",
- "integrity": "sha512-M1M6CpiE6ffoigIOWYO9UDP8TMUw9kqb21tf+08IgDYjCsOvCuDt4jQcZmoYxx+w7zlKw9/N0KXfto+I8/FrXA==",
+ "version": "10.3.0",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz",
+ "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==",
"dev": true,
- "license": "BSD-2-Clause",
"dependencies": {
- "acorn": "^8.12.0",
+ "acorn": "^8.14.0",
"acorn-jsx": "^5.3.2",
- "eslint-visitor-keys": "^4.0.0"
+ "eslint-visitor-keys": "^4.2.0"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -2931,11 +2968,10 @@
}
},
"node_modules/espree/node_modules/eslint-visitor-keys": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.0.0.tgz",
- "integrity": "sha512-OtIRv/2GyiF6o/d8K7MYKKbXrOUBIK6SfkIRM4Z0dY3w+LiQ0vy3F57m0Z71bjbyeiWFiHJ8brqnmE6H6/jEuw==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true,
- "license": "Apache-2.0",
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
},
@@ -2974,7 +3010,6 @@
"resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz",
"integrity": "sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag==",
"dev": true,
- "license": "BSD-2-Clause",
"dependencies": {
"estraverse": "^5.2.0"
},
@@ -3071,7 +3106,6 @@
"resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz",
"integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==",
"dev": true,
- "license": "MIT",
"dependencies": {
"@nodelib/fs.stat": "^2.0.2",
"@nodelib/fs.walk": "^1.2.3",
@@ -3308,7 +3342,6 @@
"resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz",
"integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==",
"dev": true,
- "license": "ISC",
"dependencies": {
"is-glob": "^4.0.1"
},
@@ -3559,15 +3592,6 @@
"node": ">=0.12.0"
}
},
- "node_modules/is-path-inside": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
- "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
- "dev": true,
- "engines": {
- "node": ">=8"
- }
- },
"node_modules/is-stream": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
@@ -5182,7 +5206,6 @@
"resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz",
"integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==",
"dev": true,
- "license": "MIT",
"engines": {
"node": ">= 8"
}
@@ -6152,14 +6175,14 @@
}
},
"node_modules/typescript-eslint": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.8.0.tgz",
- "integrity": "sha512-BjIT/VwJ8+0rVO01ZQ2ZVnjE1svFBiRczcpr1t1Yxt7sT25VSbPfrJtDsQ8uQTy2pilX5nI9gwxhUyLULNentw==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.12.2.tgz",
+ "integrity": "sha512-UbuVUWSrHVR03q9CWx+JDHeO6B/Hr9p4U5lRH++5tq/EbFq1faYZe50ZSBePptgfIKLEti0aPQ3hFgnPVcd8ZQ==",
"dev": true,
"dependencies": {
- "@typescript-eslint/eslint-plugin": "8.8.0",
- "@typescript-eslint/parser": "8.8.0",
- "@typescript-eslint/utils": "8.8.0"
+ "@typescript-eslint/eslint-plugin": "8.12.2",
+ "@typescript-eslint/parser": "8.12.2",
+ "@typescript-eslint/utils": "8.12.2"
},
"engines": {
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
@@ -6240,15 +6263,15 @@
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"node_modules/uuid": {
- "version": "10.0.0",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz",
- "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==",
+ "version": "11.0.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.0.2.tgz",
+ "integrity": "sha512-14FfcOJmqdjbBPdDjFQyk/SdT4NySW4eM0zcG+HqbHP5jzuH56xO3J1DGhgs/cEMCfwYi3HQI1gnTO62iaG+tQ==",
"funding": [
"https://github.com/sponsors/broofa",
"https://github.com/sponsors/ctavan"
],
"bin": {
- "uuid": "dist/bin/uuid"
+ "uuid": "dist/esm/bin/uuid"
}
},
"node_modules/v8-compile-cache-lib": {
@@ -6312,15 +6335,15 @@
}
},
"node_modules/winston": {
- "version": "3.13.0",
- "resolved": "https://registry.npmjs.org/winston/-/winston-3.13.0.tgz",
- "integrity": "sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==",
+ "version": "3.15.0",
+ "resolved": "https://registry.npmjs.org/winston/-/winston-3.15.0.tgz",
+ "integrity": "sha512-RhruH2Cj0bV0WgNL+lOfoUBI4DVfdUNjVnJGVovWZmrcKtrFTTRzgXYK2O9cymSGjrERCtaAeHwMNnUWXlwZow==",
"dependencies": {
"@colors/colors": "^1.6.0",
"@dabh/diagnostics": "^2.0.2",
"async": "^3.2.3",
"is-stream": "^2.0.0",
- "logform": "^2.4.0",
+ "logform": "^2.6.0",
"one-time": "^1.0.0",
"readable-stream": "^3.4.0",
"safe-stable-stringify": "^2.3.1",
@@ -6978,9 +7001,9 @@
}
},
"@eslint-community/regexpp": {
- "version": "4.11.1",
- "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.1.tgz",
- "integrity": "sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==",
+ "version": "4.12.1",
+ "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.1.tgz",
+ "integrity": "sha512-CCZCDJuduB9OUkFkY2IgppNZMi2lBQgD2qzwXkEia16cge2pijY/aXi96CJMquDMn3nJdlPV1A5KrJEXwfLNzQ==",
"dev": true
},
"@eslint/config-array": {
@@ -6994,6 +7017,12 @@
"minimatch": "^3.1.2"
}
},
+ "@eslint/core": {
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/@eslint/core/-/core-0.7.0.tgz",
+ "integrity": "sha512-xp5Jirz5DyPYlPiKat8jaq0EmYvDXKKpzTbxXMpT9eqlRJkRKIz9AGMdlvYjih+im+QlhWrpvVjl8IPC/lHlUw==",
+ "dev": true
+ },
"@eslint/eslintrc": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.1.0.tgz",
@@ -7035,9 +7064,9 @@
}
},
"@eslint/js": {
- "version": "9.11.0",
- "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.11.0.tgz",
- "integrity": "sha512-LPkkenkDqyzTFauZLLAPhIb48fj6drrfMvRGSL9tS3AcZBSVTllemLSNyCvHNNL2t797S/6DJNSIwRwXgMO/eQ==",
+ "version": "9.14.0",
+ "resolved": "https://registry.npmjs.org/@eslint/js/-/js-9.14.0.tgz",
+ "integrity": "sha512-pFoEtFWCPyDOl+C6Ift+wC7Ro89otjigCf5vcuWqWgqNSQbRrpjSvdeE6ofLz4dHmyxD5f7gIdGT4+p36L6Twg==",
"dev": true
},
"@eslint/object-schema": {
@@ -7055,6 +7084,30 @@
"levn": "^0.4.1"
}
},
+ "@humanfs/core": {
+ "version": "0.19.1",
+ "resolved": "https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz",
+ "integrity": "sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA==",
+ "dev": true
+ },
+ "@humanfs/node": {
+ "version": "0.16.6",
+ "resolved": "https://registry.npmjs.org/@humanfs/node/-/node-0.16.6.tgz",
+ "integrity": "sha512-YuI2ZHQL78Q5HbhDiBA1X4LmYdXCKCMQIfw0pw7piHJwyREFebJUvrQN4cMssyES6x+vfUbx1CIpaQUKYdQZOw==",
+ "dev": true,
+ "requires": {
+ "@humanfs/core": "^0.19.1",
+ "@humanwhocodes/retry": "^0.3.0"
+ },
+ "dependencies": {
+ "@humanwhocodes/retry": {
+ "version": "0.3.1",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.1.tgz",
+ "integrity": "sha512-JBxkERygn7Bv/GbN5Rv8Ul6LVknS+5Bp6RgDC/O8gEBU/yeH5Ui5C/OlWrTb6qct7LjjfT6Re2NxB0ln0yYybA==",
+ "dev": true
+ }
+ }
+ },
"@humanwhocodes/module-importer": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz",
@@ -7062,9 +7115,9 @@
"dev": true
},
"@humanwhocodes/retry": {
- "version": "0.3.0",
- "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.3.0.tgz",
- "integrity": "sha512-d2CGZR2o7fS6sWB7DG/3a95bGKQyHMACZ5aW8qGkkqQpUoZV6C0X7Pc7l4ZNMZkfNBf4VWNe9E1jRsf0G146Ew==",
+ "version": "0.4.0",
+ "resolved": "https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.0.tgz",
+ "integrity": "sha512-xnRgu9DxZbkWak/te3fcytNyp8MTbuiZIaueg2rgEvBuN55n04nwLYLU9TX/VVlusc9L2ZNXi99nUFNkHXtr5g==",
"dev": true
},
"@istanbuljs/load-nyc-config": {
@@ -7869,16 +7922,16 @@
"dev": true
},
"@typescript-eslint/eslint-plugin": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.8.0.tgz",
- "integrity": "sha512-wORFWjU30B2WJ/aXBfOm1LX9v9nyt9D3jsSOxC3cCaTQGCW5k4jNpmjFv3U7p/7s4yvdjHzwtv2Sd2dOyhjS0A==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.12.2.tgz",
+ "integrity": "sha512-gQxbxM8mcxBwaEmWdtLCIGLfixBMHhQjBqR8sVWNTPpcj45WlYL2IObS/DNMLH1DBP0n8qz+aiiLTGfopPEebw==",
"dev": true,
"requires": {
"@eslint-community/regexpp": "^4.10.0",
- "@typescript-eslint/scope-manager": "8.8.0",
- "@typescript-eslint/type-utils": "8.8.0",
- "@typescript-eslint/utils": "8.8.0",
- "@typescript-eslint/visitor-keys": "8.8.0",
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/type-utils": "8.12.2",
+ "@typescript-eslint/utils": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"graphemer": "^1.4.0",
"ignore": "^5.3.1",
"natural-compare": "^1.4.0",
@@ -7886,54 +7939,54 @@
}
},
"@typescript-eslint/parser": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.8.0.tgz",
- "integrity": "sha512-uEFUsgR+tl8GmzmLjRqz+VrDv4eoaMqMXW7ruXfgThaAShO9JTciKpEsB+TvnfFfbg5IpujgMXVV36gOJRLtZg==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.12.2.tgz",
+ "integrity": "sha512-MrvlXNfGPLH3Z+r7Tk+Z5moZAc0dzdVjTgUgwsdGweH7lydysQsnSww3nAmsq8blFuRD5VRlAr9YdEFw3e6PBw==",
"dev": true,
"requires": {
- "@typescript-eslint/scope-manager": "8.8.0",
- "@typescript-eslint/types": "8.8.0",
- "@typescript-eslint/typescript-estree": "8.8.0",
- "@typescript-eslint/visitor-keys": "8.8.0",
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/typescript-estree": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"debug": "^4.3.4"
}
},
"@typescript-eslint/scope-manager": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.8.0.tgz",
- "integrity": "sha512-EL8eaGC6gx3jDd8GwEFEV091210U97J0jeEHrAYvIYosmEGet4wJ+g0SYmLu+oRiAwbSA5AVrt6DxLHfdd+bUg==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.12.2.tgz",
+ "integrity": "sha512-gPLpLtrj9aMHOvxJkSbDBmbRuYdtiEbnvO25bCMza3DhMjTQw0u7Y1M+YR5JPbMsXXnSPuCf5hfq0nEkQDL/JQ==",
"dev": true,
"requires": {
- "@typescript-eslint/types": "8.8.0",
- "@typescript-eslint/visitor-keys": "8.8.0"
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2"
}
},
"@typescript-eslint/type-utils": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.8.0.tgz",
- "integrity": "sha512-IKwJSS7bCqyCeG4NVGxnOP6lLT9Okc3Zj8hLO96bpMkJab+10HIfJbMouLrlpyOr3yrQ1cA413YPFiGd1mW9/Q==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.12.2.tgz",
+ "integrity": "sha512-bwuU4TAogPI+1q/IJSKuD4shBLc/d2vGcRT588q+jzayQyjVK2X6v/fbR4InY2U2sgf8MEvVCqEWUzYzgBNcGQ==",
"dev": true,
"requires": {
- "@typescript-eslint/typescript-estree": "8.8.0",
- "@typescript-eslint/utils": "8.8.0",
+ "@typescript-eslint/typescript-estree": "8.12.2",
+ "@typescript-eslint/utils": "8.12.2",
"debug": "^4.3.4",
"ts-api-utils": "^1.3.0"
}
},
"@typescript-eslint/types": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.8.0.tgz",
- "integrity": "sha512-QJwc50hRCgBd/k12sTykOJbESe1RrzmX6COk8Y525C9l7oweZ+1lw9JiU56im7Amm8swlz00DRIlxMYLizr2Vw==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/types/-/types-8.12.2.tgz",
+ "integrity": "sha512-VwDwMF1SZ7wPBUZwmMdnDJ6sIFk4K4s+ALKLP6aIQsISkPv8jhiw65sAK6SuWODN/ix+m+HgbYDkH+zLjrzvOA==",
"dev": true
},
"@typescript-eslint/typescript-estree": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.8.0.tgz",
- "integrity": "sha512-ZaMJwc/0ckLz5DaAZ+pNLmHv8AMVGtfWxZe/x2JVEkD5LnmhWiQMMcYT7IY7gkdJuzJ9P14fRy28lUrlDSWYdw==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.12.2.tgz",
+ "integrity": "sha512-mME5MDwGe30Pq9zKPvyduyU86PH7aixwqYR2grTglAdB+AN8xXQ1vFGpYaUSJ5o5P/5znsSBeNcs5g5/2aQwow==",
"dev": true,
"requires": {
- "@typescript-eslint/types": "8.8.0",
- "@typescript-eslint/visitor-keys": "8.8.0",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/visitor-keys": "8.12.2",
"debug": "^4.3.4",
"fast-glob": "^3.3.2",
"is-glob": "^4.0.3",
@@ -7963,31 +8016,31 @@
}
},
"@typescript-eslint/utils": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.8.0.tgz",
- "integrity": "sha512-QE2MgfOTem00qrlPgyByaCHay9yb1+9BjnMFnSFkUKQfu7adBXDTnCAivURnuPPAG/qiB+kzKkZKmKfaMT0zVg==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.12.2.tgz",
+ "integrity": "sha512-UTTuDIX3fkfAz6iSVa5rTuSfWIYZ6ATtEocQ/umkRSyC9O919lbZ8dcH7mysshrCdrAM03skJOEYaBugxN+M6A==",
"dev": true,
"requires": {
"@eslint-community/eslint-utils": "^4.4.0",
- "@typescript-eslint/scope-manager": "8.8.0",
- "@typescript-eslint/types": "8.8.0",
- "@typescript-eslint/typescript-estree": "8.8.0"
+ "@typescript-eslint/scope-manager": "8.12.2",
+ "@typescript-eslint/types": "8.12.2",
+ "@typescript-eslint/typescript-estree": "8.12.2"
}
},
"@typescript-eslint/visitor-keys": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.8.0.tgz",
- "integrity": "sha512-8mq51Lx6Hpmd7HnA2fcHQo3YgfX1qbccxQOgZcb4tvasu//zXRaA1j5ZRFeCw/VRAdFi4mRM9DnZw0Nu0Q2d1g==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.12.2.tgz",
+ "integrity": "sha512-PChz8UaKQAVNHghsHcPyx1OMHoFRUEA7rJSK/mDhdq85bk+PLsUHUBqTQTFt18VJZbmxBovM65fezlheQRsSDA==",
"dev": true,
"requires": {
- "@typescript-eslint/types": "8.8.0",
+ "@typescript-eslint/types": "8.12.2",
"eslint-visitor-keys": "^3.4.3"
}
},
"acorn": {
- "version": "8.12.1",
- "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.12.1.tgz",
- "integrity": "sha512-tcpGyI9zbizT9JbV6oYE477V6mTlXvvi0T0G3SNIYE2apm/G5huBa1+K89VGeovbg+jycCrfhl3ADxErOuO6Jg==",
+ "version": "8.14.0",
+ "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.14.0.tgz",
+ "integrity": "sha512-cl669nCJTZBsL97OF4kUQm5g5hC2uihk0NxY3WENAC0TYdILVkAyHymAntgxGkl7K+t0cXIrH5siy5S4XkFycA==",
"dev": true
},
"acorn-jsx": {
@@ -8337,9 +8390,9 @@
"dev": true
},
"cookie": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
- "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="
+ "version": "0.7.0",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.0.tgz",
+ "integrity": "sha512-qCf+V4dtlNhSRXGAZatc1TasyFO6GjohcOul807YOb5ik3+kQSnb4d7iajeCL8QHaJ4uZEjCgiCJerKXwdRVlQ=="
},
"create-jest": {
"version": "29.7.0",
@@ -8479,28 +8532,31 @@
"dev": true
},
"eslint": {
- "version": "9.11.0",
- "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.11.0.tgz",
- "integrity": "sha512-yVS6XODx+tMFMDFcG4+Hlh+qG7RM6cCJXtQhCKLSsr3XkLvWggHjCqjfh0XsPPnt1c56oaT6PMgW9XWQQjdHXA==",
+ "version": "9.14.0",
+ "resolved": "https://registry.npmjs.org/eslint/-/eslint-9.14.0.tgz",
+ "integrity": "sha512-c2FHsVBr87lnUtjP4Yhvk4yEhKrQavGafRA/Se1ouse8PfbfC/Qh9Mxa00yWsZRlqeUB9raXip0aiiUZkgnr9g==",
"dev": true,
"requires": {
"@eslint-community/eslint-utils": "^4.2.0",
- "@eslint-community/regexpp": "^4.11.0",
+ "@eslint-community/regexpp": "^4.12.1",
"@eslint/config-array": "^0.18.0",
+ "@eslint/core": "^0.7.0",
"@eslint/eslintrc": "^3.1.0",
- "@eslint/js": "9.11.0",
+ "@eslint/js": "9.14.0",
"@eslint/plugin-kit": "^0.2.0",
+ "@humanfs/node": "^0.16.6",
"@humanwhocodes/module-importer": "^1.0.1",
- "@humanwhocodes/retry": "^0.3.0",
- "@nodelib/fs.walk": "^1.2.8",
+ "@humanwhocodes/retry": "^0.4.0",
+ "@types/estree": "^1.0.6",
+ "@types/json-schema": "^7.0.15",
"ajv": "^6.12.4",
"chalk": "^4.0.0",
"cross-spawn": "^7.0.2",
"debug": "^4.3.2",
"escape-string-regexp": "^4.0.0",
- "eslint-scope": "^8.0.2",
- "eslint-visitor-keys": "^4.0.0",
- "espree": "^10.1.0",
+ "eslint-scope": "^8.2.0",
+ "eslint-visitor-keys": "^4.2.0",
+ "espree": "^10.3.0",
"esquery": "^1.5.0",
"esutils": "^2.0.2",
"fast-deep-equal": "^3.1.3",
@@ -8510,13 +8566,11 @@
"ignore": "^5.2.0",
"imurmurhash": "^0.1.4",
"is-glob": "^4.0.0",
- "is-path-inside": "^3.0.3",
"json-stable-stringify-without-jsonify": "^1.0.1",
"lodash.merge": "^4.6.2",
"minimatch": "^3.1.2",
"natural-compare": "^1.4.0",
"optionator": "^0.9.3",
- "strip-ansi": "^6.0.1",
"text-table": "^0.2.0"
},
"dependencies": {
@@ -8527,9 +8581,9 @@
"dev": true
},
"eslint-visitor-keys": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.0.0.tgz",
- "integrity": "sha512-OtIRv/2GyiF6o/d8K7MYKKbXrOUBIK6SfkIRM4Z0dY3w+LiQ0vy3F57m0Z71bjbyeiWFiHJ8brqnmE6H6/jEuw==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true
},
"find-up": {
@@ -8611,9 +8665,9 @@
}
},
"eslint-scope": {
- "version": "8.0.2",
- "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.0.2.tgz",
- "integrity": "sha512-6E4xmrTw5wtxnLA5wYL3WDfhZ/1bUBGOXV0zQvVRDOtrR8D0p6W7fs3JweNYhwRYeGvd/1CKX2se0/2s7Q/nJA==",
+ "version": "8.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.2.0.tgz",
+ "integrity": "sha512-PHlWUfG6lvPc3yvP5A4PNyBL1W8fkDUccmI21JUu/+GKZBoH/W5u6usENXUrWFRsyoW5ACUjFGgAFQp5gUlb/A==",
"dev": true,
"requires": {
"esrecurse": "^4.3.0",
@@ -8627,20 +8681,20 @@
"dev": true
},
"espree": {
- "version": "10.1.0",
- "resolved": "https://registry.npmjs.org/espree/-/espree-10.1.0.tgz",
- "integrity": "sha512-M1M6CpiE6ffoigIOWYO9UDP8TMUw9kqb21tf+08IgDYjCsOvCuDt4jQcZmoYxx+w7zlKw9/N0KXfto+I8/FrXA==",
+ "version": "10.3.0",
+ "resolved": "https://registry.npmjs.org/espree/-/espree-10.3.0.tgz",
+ "integrity": "sha512-0QYC8b24HWY8zjRnDTL6RiHfDbAWn63qb4LMj1Z4b076A4une81+z03Kg7l7mn/48PUTqoLptSXez8oknU8Clg==",
"dev": true,
"requires": {
- "acorn": "^8.12.0",
+ "acorn": "^8.14.0",
"acorn-jsx": "^5.3.2",
- "eslint-visitor-keys": "^4.0.0"
+ "eslint-visitor-keys": "^4.2.0"
},
"dependencies": {
"eslint-visitor-keys": {
- "version": "4.0.0",
- "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.0.0.tgz",
- "integrity": "sha512-OtIRv/2GyiF6o/d8K7MYKKbXrOUBIK6SfkIRM4Z0dY3w+LiQ0vy3F57m0Z71bjbyeiWFiHJ8brqnmE6H6/jEuw==",
+ "version": "4.2.0",
+ "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.0.tgz",
+ "integrity": "sha512-UyLnSehNt62FFhSwjZlHmeokpRK59rcz29j+F1/aDgbkbRTk7wIc9XzdoasMUbRNKDM0qQt/+BJ4BrpFeABemw==",
"dev": true
}
}
@@ -9103,12 +9157,6 @@
"integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==",
"dev": true
},
- "is-path-inside": {
- "version": "3.0.3",
- "resolved": "https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz",
- "integrity": "sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ==",
- "dev": true
- },
"is-stream": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/is-stream/-/is-stream-2.0.1.tgz",
@@ -11039,14 +11087,14 @@
"dev": true
},
"typescript-eslint": {
- "version": "8.8.0",
- "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.8.0.tgz",
- "integrity": "sha512-BjIT/VwJ8+0rVO01ZQ2ZVnjE1svFBiRczcpr1t1Yxt7sT25VSbPfrJtDsQ8uQTy2pilX5nI9gwxhUyLULNentw==",
+ "version": "8.12.2",
+ "resolved": "https://registry.npmjs.org/typescript-eslint/-/typescript-eslint-8.12.2.tgz",
+ "integrity": "sha512-UbuVUWSrHVR03q9CWx+JDHeO6B/Hr9p4U5lRH++5tq/EbFq1faYZe50ZSBePptgfIKLEti0aPQ3hFgnPVcd8ZQ==",
"dev": true,
"requires": {
- "@typescript-eslint/eslint-plugin": "8.8.0",
- "@typescript-eslint/parser": "8.8.0",
- "@typescript-eslint/utils": "8.8.0"
+ "@typescript-eslint/eslint-plugin": "8.12.2",
+ "@typescript-eslint/parser": "8.12.2",
+ "@typescript-eslint/utils": "8.12.2"
}
},
"undici-types": {
@@ -11090,9 +11138,9 @@
"integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="
},
"uuid": {
- "version": "10.0.0",
- "resolved": "https://registry.npmjs.org/uuid/-/uuid-10.0.0.tgz",
- "integrity": "sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ=="
+ "version": "11.0.2",
+ "resolved": "https://registry.npmjs.org/uuid/-/uuid-11.0.2.tgz",
+ "integrity": "sha512-14FfcOJmqdjbBPdDjFQyk/SdT4NySW4eM0zcG+HqbHP5jzuH56xO3J1DGhgs/cEMCfwYi3HQI1gnTO62iaG+tQ=="
},
"v8-compile-cache-lib": {
"version": "3.0.1",
@@ -11148,15 +11196,15 @@
}
},
"winston": {
- "version": "3.13.0",
- "resolved": "https://registry.npmjs.org/winston/-/winston-3.13.0.tgz",
- "integrity": "sha512-rwidmA1w3SE4j0E5MuIufFhyJPBDG7Nu71RkZor1p2+qHvJSZ9GYDA81AyleQcZbh/+V6HjeBdfnTZJm9rSeQQ==",
+ "version": "3.15.0",
+ "resolved": "https://registry.npmjs.org/winston/-/winston-3.15.0.tgz",
+ "integrity": "sha512-RhruH2Cj0bV0WgNL+lOfoUBI4DVfdUNjVnJGVovWZmrcKtrFTTRzgXYK2O9cymSGjrERCtaAeHwMNnUWXlwZow==",
"requires": {
"@colors/colors": "^1.6.0",
"@dabh/diagnostics": "^2.0.2",
"async": "^3.2.3",
"is-stream": "^2.0.0",
- "logform": "^2.4.0",
+ "logform": "^2.6.0",
"one-time": "^1.0.0",
"readable-stream": "^3.4.0",
"safe-stable-stringify": "^2.3.1",
diff --git a/superset-websocket/package.json b/superset-websocket/package.json
index 7af0d3c57427b..1a78765369c88 100644
--- a/superset-websocket/package.json
+++ b/superset-websocket/package.json
@@ -17,13 +17,13 @@
},
"license": "Apache-2.0",
"dependencies": {
- "cookie": "^0.6.0",
+ "cookie": "^0.7.0",
"hot-shots": "^10.0.0",
"ioredis": "^4.28.0",
"jsonwebtoken": "^9.0.2",
"lodash": "^4.17.21",
- "uuid": "^10.0.0",
- "winston": "^3.13.0",
+ "uuid": "^11.0.2",
+ "winston": "^3.15.0",
"ws": "^8.18.0"
},
"devDependencies": {
@@ -39,7 +39,7 @@
"@types/ws": "^8.5.12",
"@typescript-eslint/eslint-plugin": "^8.8.0",
"@typescript-eslint/parser": "^8.6.0",
- "eslint": "^9.11.0",
+ "eslint": "^9.14.0",
"eslint-config-prettier": "^9.1.0",
"eslint-plugin-lodash": "^8.0.0",
"globals": "^15.9.0",
@@ -48,7 +48,7 @@
"ts-jest": "^29.2.5",
"ts-node": "^10.9.2",
"typescript": "^5.6.2",
- "typescript-eslint": "^8.8.0"
+ "typescript-eslint": "^8.12.2"
},
"engines": {
"node": "^16.9.1",
diff --git a/superset-websocket/spec/index.test.ts b/superset-websocket/spec/index.test.ts
index 1643c9f6ac8b1..20a399d395c61 100644
--- a/superset-websocket/spec/index.test.ts
+++ b/superset-websocket/spec/index.test.ts
@@ -94,10 +94,10 @@ describe('server', () => {
response as unknown as http.ServerResponse,
);
- expect(writeHeadMock).toBeCalledTimes(1);
+ expect(writeHeadMock).toHaveBeenCalledTimes(1);
expect(writeHeadMock).toHaveBeenLastCalledWith(200);
- expect(endMock).toBeCalledTimes(1);
+ expect(endMock).toHaveBeenCalledTimes(1);
expect(endMock).toHaveBeenLastCalledWith('OK');
});
@@ -123,10 +123,10 @@ describe('server', () => {
response as unknown as http.ServerResponse,
);
- expect(writeHeadMock).toBeCalledTimes(1);
+ expect(writeHeadMock).toHaveBeenCalledTimes(1);
expect(writeHeadMock).toHaveBeenLastCalledWith(404);
- expect(endMock).toBeCalledTimes(1);
+ expect(endMock).toHaveBeenCalledTimes(1);
expect(endMock).toHaveBeenLastCalledWith('Not Found');
});
});
@@ -200,16 +200,16 @@ describe('server', () => {
const sendMock = jest.spyOn(ws, 'send');
const socketInstance = { ws: ws, channel: channelId, pongTs: Date.now() };
- expect(statsdIncrementMock).toBeCalledTimes(0);
+ expect(statsdIncrementMock).toHaveBeenCalledTimes(0);
server.trackClient(channelId, socketInstance);
- expect(statsdIncrementMock).toBeCalledTimes(1);
+ expect(statsdIncrementMock).toHaveBeenCalledTimes(1);
expect(statsdIncrementMock).toHaveBeenNthCalledWith(
1,
'ws_connected_client',
);
server.processStreamResults(streamReturnValue);
- expect(statsdIncrementMock).toBeCalledTimes(1);
+ expect(statsdIncrementMock).toHaveBeenCalledTimes(1);
const message1 = `{"id":"1615426152415-0","channel_id":"${channelId}","job_id":"c9b99965-8f1e-4ce5-aa43-d6fc94d6a510","user_id":"1","status":"done","errors":[],"result_url":"/superset/explore_json/data/ejr-37281682b1282cdb8f25e0de0339b386"}`;
const message2 = `{"id":"1615426152516-0","channel_id":"${channelId}","job_id":"f1e5bb1f-f2f1-4f21-9b2f-c9b91dcc9b59","user_id":"1","status":"done","errors":[],"result_url":"/api/v1/chart/data/qc-64e8452dc9907dd77746cb75a19202de"}`;
@@ -221,9 +221,9 @@ describe('server', () => {
const ws = new wsMock('localhost');
const sendMock = jest.spyOn(ws, 'send');
- expect(statsdIncrementMock).toBeCalledTimes(0);
+ expect(statsdIncrementMock).toHaveBeenCalledTimes(0);
server.processStreamResults(streamReturnValue);
- expect(statsdIncrementMock).toBeCalledTimes(0);
+ expect(statsdIncrementMock).toHaveBeenCalledTimes(0);
expect(sendMock).not.toHaveBeenCalled();
});
@@ -236,16 +236,16 @@ describe('server', () => {
const cleanChannelMock = jest.spyOn(server, 'cleanChannel');
const socketInstance = { ws: ws, channel: channelId, pongTs: Date.now() };
- expect(statsdIncrementMock).toBeCalledTimes(0);
+ expect(statsdIncrementMock).toHaveBeenCalledTimes(0);
server.trackClient(channelId, socketInstance);
- expect(statsdIncrementMock).toBeCalledTimes(1);
+ expect(statsdIncrementMock).toHaveBeenCalledTimes(1);
expect(statsdIncrementMock).toHaveBeenNthCalledWith(
1,
'ws_connected_client',
);
server.processStreamResults(streamReturnValue);
- expect(statsdIncrementMock).toBeCalledTimes(2);
+ expect(statsdIncrementMock).toHaveBeenCalledTimes(2);
expect(statsdIncrementMock).toHaveBeenNthCalledWith(
2,
'ws_client_send_error',
diff --git a/superset-websocket/utils/client-ws-app/package-lock.json b/superset-websocket/utils/client-ws-app/package-lock.json
index 22d435f46167b..8d84e04a3f9e5 100644
--- a/superset-websocket/utils/client-ws-app/package-lock.json
+++ b/superset-websocket/utils/client-ws-app/package-lock.json
@@ -8,9 +8,9 @@
"name": "client-ws-app",
"version": "0.0.0",
"dependencies": {
- "cookie-parser": "~1.4.6",
+ "cookie-parser": "~1.4.7",
"debug": "~4.3.7",
- "express": "~4.21.0",
+ "express": "~4.21.1",
"http-errors": "~2.0.0",
"jsonwebtoken": "^9.0.2",
"morgan": "~1.10.0",
@@ -242,33 +242,25 @@
}
},
"node_modules/cookie": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
- "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==",
+ "version": "0.7.2",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
+ "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/cookie-parser": {
- "version": "1.4.6",
- "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.6.tgz",
- "integrity": "sha512-z3IzaNjdwUC2olLIB5/ITd0/setiaFMLYiZJle7xg5Fe9KWAceil7xszYfHHBtDFYLSgJduS2Ty0P1uJdPDJeA==",
+ "version": "1.4.7",
+ "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.7.tgz",
+ "integrity": "sha512-nGUvgXnotP3BsjiLX2ypbQnWoGUPIIfHQNZkkC668ntrzGWEZVW70HDEB1qnNGMicPje6EttlIgzo51YSwNQGw==",
"dependencies": {
- "cookie": "0.4.1",
+ "cookie": "0.7.2",
"cookie-signature": "1.0.6"
},
"engines": {
"node": ">= 0.8.0"
}
},
- "node_modules/cookie-parser/node_modules/cookie": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.1.tgz",
- "integrity": "sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA==",
- "engines": {
- "node": ">= 0.6"
- }
- },
"node_modules/cookie-signature": {
"version": "1.0.6",
"resolved": "https://registry.npmjs.org/cookie-signature/-/cookie-signature-1.0.6.tgz",
@@ -382,16 +374,16 @@
}
},
"node_modules/express": {
- "version": "4.21.0",
- "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz",
- "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==",
+ "version": "4.21.1",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz",
+ "integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==",
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
"body-parser": "1.20.3",
"content-disposition": "0.5.4",
"content-type": "~1.0.4",
- "cookie": "0.6.0",
+ "cookie": "0.7.1",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
@@ -422,6 +414,14 @@
"node": ">= 0.10.0"
}
},
+ "node_modules/express/node_modules/cookie": {
+ "version": "0.7.1",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz",
+ "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==",
+ "engines": {
+ "node": ">= 0.6"
+ }
+ },
"node_modules/express/node_modules/debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
@@ -1545,24 +1545,17 @@
"integrity": "sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA=="
},
"cookie": {
- "version": "0.6.0",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz",
- "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw=="
+ "version": "0.7.2",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
+ "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w=="
},
"cookie-parser": {
- "version": "1.4.6",
- "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.6.tgz",
- "integrity": "sha512-z3IzaNjdwUC2olLIB5/ITd0/setiaFMLYiZJle7xg5Fe9KWAceil7xszYfHHBtDFYLSgJduS2Ty0P1uJdPDJeA==",
+ "version": "1.4.7",
+ "resolved": "https://registry.npmjs.org/cookie-parser/-/cookie-parser-1.4.7.tgz",
+ "integrity": "sha512-nGUvgXnotP3BsjiLX2ypbQnWoGUPIIfHQNZkkC668ntrzGWEZVW70HDEB1qnNGMicPje6EttlIgzo51YSwNQGw==",
"requires": {
- "cookie": "0.4.1",
+ "cookie": "0.7.2",
"cookie-signature": "1.0.6"
- },
- "dependencies": {
- "cookie": {
- "version": "0.4.1",
- "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.4.1.tgz",
- "integrity": "sha512-ZwrFkGJxUR3EIoXtO+yVE69Eb7KlixbaeAWfBQB9vVsNn/o+Yw69gBWSSDK825hQNdN+wF8zELf3dFNl/kxkUA=="
- }
}
},
"cookie-signature": {
@@ -1645,16 +1638,16 @@
"integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg=="
},
"express": {
- "version": "4.21.0",
- "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz",
- "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==",
+ "version": "4.21.1",
+ "resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz",
+ "integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==",
"requires": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
"body-parser": "1.20.3",
"content-disposition": "0.5.4",
"content-type": "~1.0.4",
- "cookie": "0.6.0",
+ "cookie": "0.7.1",
"cookie-signature": "1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
@@ -1682,6 +1675,11 @@
"vary": "~1.1.2"
},
"dependencies": {
+ "cookie": {
+ "version": "0.7.1",
+ "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz",
+ "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w=="
+ },
"debug": {
"version": "2.6.9",
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
diff --git a/superset-websocket/utils/client-ws-app/package.json b/superset-websocket/utils/client-ws-app/package.json
index 787a238513af5..adfb201c46041 100644
--- a/superset-websocket/utils/client-ws-app/package.json
+++ b/superset-websocket/utils/client-ws-app/package.json
@@ -6,9 +6,9 @@
"start": "node ./bin/www"
},
"dependencies": {
- "cookie-parser": "~1.4.6",
+ "cookie-parser": "~1.4.7",
"debug": "~4.3.7",
- "express": "~4.21.0",
+ "express": "~4.21.1",
"http-errors": "~2.0.0",
"jsonwebtoken": "^9.0.2",
"morgan": "~1.10.0",
diff --git a/superset/cachekeys/api.py b/superset/cachekeys/api.py
index 093d81b1c3f7d..365b79c423239 100644
--- a/superset/cachekeys/api.py
+++ b/superset/cachekeys/api.py
@@ -114,8 +114,8 @@ def invalidate(self) -> Response:
CacheKey.cache_key.in_(cache_keys)
)
- with db.session.begin_nested():
- db.session.execute(delete_stmt)
+ db.session.execute(delete_stmt)
+ db.session.commit() # pylint: disable=consider-using-transaction
stats_logger_manager.instance.gauge(
"invalidated_cache", len(cache_keys)
@@ -126,6 +126,7 @@ def invalidate(self) -> Response:
len(datasource_uids),
)
except SQLAlchemyError as ex: # pragma: no cover
+ db.session.rollback() # pylint: disable=consider-using-transaction
logger.error(ex, exc_info=True)
return self.response_500(str(ex))
return self.response(201)
diff --git a/superset/charts/data/api.py b/superset/charts/data/api.py
index ae88fdef5ad15..653b09896e86f 100644
--- a/superset/charts/data/api.py
+++ b/superset/charts/data/api.py
@@ -394,8 +394,13 @@ def _process_data(query_data: Any) -> Any:
)
if result_format == ChartDataResultFormat.JSON:
+ queries = result["queries"]
+ if security_manager.is_guest_user():
+ for query in queries:
+ with contextlib.suppress(KeyError):
+ del query["query"]
response_data = json.dumps(
- {"result": result["queries"]},
+ {"result": queries},
default=json.json_int_dttm_ser,
ignore_nan=True,
)
diff --git a/superset/commands/dashboard/exceptions.py b/superset/commands/dashboard/exceptions.py
index 9281119b320bf..a4b7be12611f5 100644
--- a/superset/commands/dashboard/exceptions.py
+++ b/superset/commands/dashboard/exceptions.py
@@ -58,6 +58,10 @@ class DashboardUpdateFailedError(UpdateFailedError):
message = _("Dashboard could not be updated.")
+class DashboardNativeFiltersUpdateFailedError(UpdateFailedError):
+ message = _("Dashboard native filters could not be patched.")
+
+
class DashboardDeleteFailedError(DeleteFailedError):
message = _("Dashboard could not be deleted.")
diff --git a/superset/commands/dashboard/update.py b/superset/commands/dashboard/update.py
index 031db1af31fdf..15f5e5b5841b8 100644
--- a/superset/commands/dashboard/update.py
+++ b/superset/commands/dashboard/update.py
@@ -27,6 +27,7 @@
from superset.commands.dashboard.exceptions import (
DashboardForbiddenError,
DashboardInvalidError,
+ DashboardNativeFiltersUpdateFailedError,
DashboardNotFoundError,
DashboardSlugExistsValidationError,
DashboardUpdateFailedError,
@@ -67,7 +68,6 @@ def run(self) -> Model:
dashboard,
data=json.loads(self._properties.get("json_metadata", "{}")),
)
-
return dashboard
def validate(self) -> None:
@@ -187,3 +187,18 @@ def deactivate_reports(reports_list: list[ReportSchedule]) -> None:
deleted_tabs = find_deleted_tabs()
reports = find_reports_containing_tabs(deleted_tabs)
deactivate_reports(reports)
+
+
+class UpdateDashboardNativeFiltersCommand(UpdateDashboardCommand):
+ @transaction(
+ on_error=partial(on_error, reraise=DashboardNativeFiltersUpdateFailedError)
+ )
+ def run(self) -> Model:
+ super().validate()
+ assert self._model
+
+ configuration = DashboardDAO.update_native_filters_config(
+ self._model, self._properties
+ )
+
+ return configuration
diff --git a/superset/commands/database/test_connection.py b/superset/commands/database/test_connection.py
index 8aef6c1359b5e..7330446d47ed6 100644
--- a/superset/commands/database/test_connection.py
+++ b/superset/commands/database/test_connection.py
@@ -93,7 +93,7 @@ def __init__(self, data: dict[str, Any]):
self._context = context
self._uri = uri
- def run(self) -> None: # pylint: disable=too-many-statements
+ def run(self) -> None: # pylint: disable=too-many-statements,too-many-branches
self.validate()
ex_str = ""
ssh_tunnel = self._properties.get("ssh_tunnel")
@@ -225,6 +225,10 @@ def ping(engine: Engine) -> bool:
# bubble up the exception to return proper status code
raise
except Exception as ex:
+ if database.is_oauth2_enabled() and database.db_engine_spec.needs_oauth2(
+ ex
+ ):
+ database.start_oauth2_dance()
event_logger.log_with_context(
action=get_log_connection_action(
"test_connection_error", ssh_tunnel, ex
diff --git a/superset/commands/report/create.py b/superset/commands/report/create.py
index 2a67f640022d2..9191e5a17b966 100644
--- a/superset/commands/report/create.py
+++ b/superset/commands/report/create.py
@@ -143,10 +143,17 @@ def _validate_report_extra(self, exceptions: list[ValidationError]) -> None:
position_data = json.loads(dashboard.position_json or "{}")
active_tabs = dashboard_state.get("activeTabs") or []
- anchor = dashboard_state.get("anchor")
invalid_tab_ids = set(active_tabs) - set(position_data.keys())
- if anchor and anchor not in position_data:
- invalid_tab_ids.add(anchor)
+
+ if anchor := dashboard_state.get("anchor"):
+ try:
+ anchor_list: list[str] = json.loads(anchor)
+ if _invalid_tab_ids := set(anchor_list) - set(position_data.keys()):
+ invalid_tab_ids.update(_invalid_tab_ids)
+ except json.JSONDecodeError:
+ if anchor not in position_data:
+ invalid_tab_ids.add(anchor)
+
if invalid_tab_ids:
exceptions.append(
ValidationError(
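Note: the anchor value may now be either a plain tab ID or a JSON-encoded list of tab IDs. A minimal sketch of the parsing pattern used above (function name and sample IDs are illustrative, not from the diff):

import json

def parse_anchor(anchor: str) -> list[str]:
    # A JSON-encoded list such as '["TAB-1", "TAB-2"]' yields several tabs;
    # any non-JSON string is treated as a single tab ID.
    try:
        return json.loads(anchor)
    except json.JSONDecodeError:
        return [anchor]

assert parse_anchor('["TAB-1", "TAB-2"]') == ["TAB-1", "TAB-2"]
assert parse_anchor("TAB-1") == ["TAB-1"]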
diff --git a/superset/commands/report/execute.py b/superset/commands/report/execute.py
index eadf193bf4ec0..c81750daba407 100644
--- a/superset/commands/report/execute.py
+++ b/superset/commands/report/execute.py
@@ -49,6 +49,7 @@
REPORT_SCHEDULE_ERROR_NOTIFICATION_MARKER,
ReportScheduleDAO,
)
+from superset.dashboards.permalink.types import DashboardPermalinkState
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import SupersetErrorsException, SupersetException
from superset.extensions import feature_flag_manager, machine_auth_provider_factory
@@ -206,11 +207,8 @@ def _get_url(
if (
dashboard_state := self._report_schedule.extra.get("dashboard")
) and feature_flag_manager.is_feature_enabled("ALERT_REPORT_TABS"):
- permalink_key = CreateDashboardPermalinkCommand(
- dashboard_id=str(self._report_schedule.dashboard.uuid),
- state=dashboard_state,
- ).run()
- return get_url_path("Superset.dashboard_permalink", key=permalink_key)
+ return self._get_tab_url(dashboard_state)
+
dashboard = self._report_schedule.dashboard
dashboard_id_or_slug = (
dashboard.uuid if dashboard and dashboard.uuid else dashboard.id
@@ -223,12 +221,70 @@ def _get_url(
**kwargs,
)
+ def get_dashboard_urls(
+ self, user_friendly: bool = False, **kwargs: Any
+ ) -> list[str]:
+ """
+ Retrieve the URL for the dashboard tabs, or return the dashboard URL if no tabs are available.
+ """
+ force = "true" if self._report_schedule.force_screenshot else "false"
+ if (
+ dashboard_state := self._report_schedule.extra.get("dashboard")
+ ) and feature_flag_manager.is_feature_enabled("ALERT_REPORT_TABS"):
+ if anchor := dashboard_state.get("anchor"):
+ try:
+ anchor_list: list[str] = json.loads(anchor)
+ return self._get_tabs_urls(anchor_list)
+ except json.JSONDecodeError:
+ logger.debug("Anchor value is not a list, Fall back to single tab")
+ return [self._get_tab_url(dashboard_state)]
+
+ dashboard = self._report_schedule.dashboard
+ dashboard_id_or_slug = (
+ dashboard.uuid if dashboard and dashboard.uuid else dashboard.id
+ )
+
+ return [
+ get_url_path(
+ "Superset.dashboard",
+ user_friendly=user_friendly,
+ dashboard_id_or_slug=dashboard_id_or_slug,
+ force=force,
+ **kwargs,
+ )
+ ]
+
+ def _get_tab_url(self, dashboard_state: DashboardPermalinkState) -> str:
+ """
+ Get the permalink URL for a single tab.
+ """
+ permalink_key = CreateDashboardPermalinkCommand(
+ dashboard_id=str(self._report_schedule.dashboard.uuid),
+ state=dashboard_state,
+ ).run()
+ return get_url_path("Superset.dashboard_permalink", key=permalink_key)
+
+ def _get_tabs_urls(self, tab_anchors: list[str]) -> list[str]:
+ """
+ Get permalink URLs for multiple tabs.
+ """
+ return [
+ self._get_tab_url(
+ {
+ "anchor": tab_anchor,
+ "dataMask": None,
+ "activeTabs": None,
+ "urlParams": None,
+ }
+ )
+ for tab_anchor in tab_anchors
+ ]
+
def _get_screenshots(self) -> list[bytes]:
"""
Get chart or dashboard screenshots
:raises: ReportScheduleScreenshotFailedError
"""
- url = self._get_url()
_, username = get_executor(
executor_types=app.config["ALERT_REPORTS_EXECUTE_AS"],
model=self._report_schedule,
@@ -236,31 +292,41 @@ def _get_screenshots(self) -> list[bytes]:
user = security_manager.find_user(username)
if self._report_schedule.chart:
+ url = self._get_url()
window_width, window_height = app.config["WEBDRIVER_WINDOW"]["slice"]
window_size = (
self._report_schedule.custom_width or window_width,
self._report_schedule.custom_height or window_height,
)
- screenshot: Union[ChartScreenshot, DashboardScreenshot] = ChartScreenshot(
- url,
- self._report_schedule.chart.digest,
- window_size=window_size,
- thumb_size=app.config["WEBDRIVER_WINDOW"]["slice"],
- )
+ screenshots: list[Union[ChartScreenshot, DashboardScreenshot]] = [
+ ChartScreenshot(
+ url,
+ self._report_schedule.chart.digest,
+ window_size=window_size,
+ thumb_size=app.config["WEBDRIVER_WINDOW"]["slice"],
+ )
+ ]
else:
+ urls = self.get_dashboard_urls()
window_width, window_height = app.config["WEBDRIVER_WINDOW"]["dashboard"]
window_size = (
self._report_schedule.custom_width or window_width,
self._report_schedule.custom_height or window_height,
)
- screenshot = DashboardScreenshot(
- url,
- self._report_schedule.dashboard.digest,
- window_size=window_size,
- thumb_size=app.config["WEBDRIVER_WINDOW"]["dashboard"],
- )
+ screenshots = [
+ DashboardScreenshot(
+ url,
+ self._report_schedule.dashboard.digest,
+ window_size=window_size,
+ thumb_size=app.config["WEBDRIVER_WINDOW"]["dashboard"],
+ )
+ for url in urls
+ ]
try:
- image = screenshot.get_screenshot(user=user)
+ images = []
+ for screenshot in screenshots:
+ if image := screenshot.get_screenshot(user=user):
+ images.append(image)
except SoftTimeLimitExceeded as ex:
logger.warning("A timeout occurred while taking a screenshot.")
raise ReportScheduleScreenshotTimeout() from ex
@@ -268,9 +334,9 @@ def _get_screenshots(self) -> list[bytes]:
raise ReportScheduleScreenshotFailedError(
f"Failed taking a screenshot {str(ex)}"
) from ex
- if not image:
+ if not images:
raise ReportScheduleScreenshotFailedError()
- return [image]
+ return images
def _get_pdf(self) -> bytes:
"""
@@ -426,6 +492,7 @@ def _get_notification_content(self) -> NotificationContent:
name=self._report_schedule.name,
text=error_text,
header_data=header_data,
+ url=url,
)
if (
@@ -533,13 +600,14 @@ def send_error(self, name: str, message: str) -> None:
:raises: CommandException
"""
header_data = self._get_log_data()
+ url = self._get_url(user_friendly=True)
logger.info(
"header_data in notifications for alerts and reports %s, taskid, %s",
header_data,
self._execution_id,
)
notification_content = NotificationContent(
- name=name, text=message, header_data=header_data
+ name=name, text=message, header_data=header_data, url=url
)
# filter recipients to recipients who are also owners
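Note: with ALERT_REPORT_TABS enabled, a report anchored to several tabs now yields one permalink state, and hence one screenshot, per tab. A sketch of the expansion performed by _get_tabs_urls (tab IDs are hypothetical):

tab_anchors = ["TAB-alpha", "TAB-beta"]  # hypothetical tab IDs
states = [
    {"anchor": anchor, "dataMask": None, "activeTabs": None, "urlParams": None}
    for anchor in tab_anchors
]
# Each state is then passed to CreateDashboardPermalinkCommand(...).run()
# to obtain a permalink URL for that tab.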
diff --git a/superset/config.py b/superset/config.py
index d19e30a5a5282..d278747bfd56b 100644
--- a/superset/config.py
+++ b/superset/config.py
@@ -357,7 +357,7 @@ def _try_json_readsha(filepath: str, length: int) -> str | None:
# OPENID_PROVIDERS = [
# { 'name': 'Yahoo', 'url': 'https://open.login.yahoo.com/' },
# { 'name': 'Flickr', 'url': 'https://www.flickr.com/' },
-
+# ]
# ---------------------------------------------------
# Roles config
# ---------------------------------------------------
@@ -478,6 +478,14 @@ class D3TimeFormat(TypedDict, total=False):
"PRESTO_EXPAND_DATA": False,
# Exposes API endpoint to compute thumbnails
"THUMBNAILS": False,
+ # Enable the endpoints to cache and retrieve dashboard screenshots via webdriver.
+ # Requires configuring Celery and a cache using THUMBNAIL_CACHE_CONFIG.
+ "ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS": False,
+ # Generate screenshots (PDF or JPG) of dashboards using the web driver.
+ # When disabled, screenshots are generated on the fly by the browser.
+ # This feature flag is used by the download feature in the dashboard view.
+ # It depends on ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS being enabled.
+ "ENABLE_DASHBOARD_DOWNLOAD_WEBDRIVER_SCREENSHOT": False,
"SHARE_QUERIES_VIA_KV_STORE": False,
"TAGGING_SYSTEM": False,
"SQLLAB_BACKEND_PERSISTENCE": True,
@@ -486,6 +494,7 @@ class D3TimeFormat(TypedDict, total=False):
"ESCAPE_MARKDOWN_HTML": False,
"DASHBOARD_CROSS_FILTERS": True, # deprecated
"DASHBOARD_VIRTUALIZATION": True,
+ # This feature flag is still in beta and is not recommended for production use.
"GLOBAL_ASYNC_QUERIES": False,
"EMBEDDED_SUPERSET": False,
# Enables Alerts and reports new implementation
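Note: for reference, a minimal superset_config.py override enabling the new screenshot endpoints; per the comments above, both flags plus THUMBNAILS, a Celery worker, and a configured thumbnail cache are assumed to be required:

# superset_config.py (sketch)
FEATURE_FLAGS = {
    "THUMBNAILS": True,
    "ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS": True,
    "ENABLE_DASHBOARD_DOWNLOAD_WEBDRIVER_SCREENSHOT": True,
}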
diff --git a/superset/connectors/sqla/models.py b/superset/connectors/sqla/models.py
index e8aa0d705b8f8..fb7409adba589 100644
--- a/superset/connectors/sqla/models.py
+++ b/superset/connectors/sqla/models.py
@@ -116,7 +116,6 @@
)
from superset.utils import core as utils, json
from superset.utils.backports import StrEnum
-from superset.utils.core import GenericDataType, MediumText
config = app.config
metadata = Model.metadata # pylint: disable=no-member
@@ -477,7 +476,7 @@ def data_for_slices( # pylint: disable=too-many-locals
]
filtered_columns: list[Column] = []
- column_types: set[GenericDataType] = set()
+ column_types: set[utils.GenericDataType] = set()
for column_ in data["columns"]:
generic_type = column_.get("type_generic")
if generic_type is not None:
@@ -511,7 +510,7 @@ def data_for_slices( # pylint: disable=too-many-locals
def filter_values_handler( # pylint: disable=too-many-arguments
values: FilterValues | None,
operator: str,
- target_generic_type: GenericDataType,
+ target_generic_type: utils.GenericDataType,
target_native_type: str | None = None,
is_list_target: bool = False,
db_engine_spec: builtins.type[BaseEngineSpec] | None = None,
@@ -829,10 +828,10 @@ class TableColumn(AuditMixinNullable, ImportExportMixin, CertificationMixin, Mod
advanced_data_type = Column(String(255))
groupby = Column(Boolean, default=True)
filterable = Column(Boolean, default=True)
- description = Column(MediumText())
+ description = Column(utils.MediumText())
table_id = Column(Integer, ForeignKey("tables.id", ondelete="CASCADE"))
is_dttm = Column(Boolean, default=False)
- expression = Column(MediumText())
+ expression = Column(utils.MediumText())
python_date_format = Column(String(255))
extra = Column(Text)
@@ -892,21 +891,21 @@ def is_boolean(self) -> bool:
"""
Check if the column has a boolean datatype.
"""
- return self.type_generic == GenericDataType.BOOLEAN
+ return self.type_generic == utils.GenericDataType.BOOLEAN
@property
def is_numeric(self) -> bool:
"""
Check if the column has a numeric datatype.
"""
- return self.type_generic == GenericDataType.NUMERIC
+ return self.type_generic == utils.GenericDataType.NUMERIC
@property
def is_string(self) -> bool:
"""
Check if the column has a string datatype.
"""
- return self.type_generic == GenericDataType.STRING
+ return self.type_generic == utils.GenericDataType.STRING
@property
def is_temporal(self) -> bool:
@@ -918,7 +917,7 @@ def is_temporal(self) -> bool:
"""
if self.is_dttm is not None:
return self.is_dttm
- return self.type_generic == GenericDataType.TEMPORAL
+ return self.type_generic == utils.GenericDataType.TEMPORAL
@property
def database(self) -> Database:
@@ -935,7 +934,7 @@ def db_extra(self) -> dict[str, Any]:
@property
def type_generic(self) -> utils.GenericDataType | None:
if self.is_dttm:
- return GenericDataType.TEMPORAL
+ return utils.GenericDataType.TEMPORAL
return (
column_spec.generic_type
@@ -1038,12 +1037,12 @@ class SqlMetric(AuditMixinNullable, ImportExportMixin, CertificationMixin, Model
metric_name = Column(String(255), nullable=False)
verbose_name = Column(String(1024))
metric_type = Column(String(32))
- description = Column(MediumText())
+ description = Column(utils.MediumText())
d3format = Column(String(128))
currency = Column(String(128))
warning_text = Column(Text)
table_id = Column(Integer, ForeignKey("tables.id", ondelete="CASCADE"))
- expression = Column(MediumText(), nullable=False)
+ expression = Column(utils.MediumText(), nullable=False)
extra = Column(Text)
table: Mapped[SqlaTable] = relationship(
@@ -1185,7 +1184,7 @@ class SqlaTable(
)
schema = Column(String(255))
catalog = Column(String(256), nullable=True, default=None)
- sql = Column(MediumText())
+ sql = Column(utils.MediumText())
is_sqllab_view = Column(Boolean, default=False)
template_params = Column(Text)
extra = Column(Text)
@@ -1980,6 +1979,26 @@ def has_extra_cache_key_calls(self, query_obj: QueryObjectDict) -> bool:
templatable_statements.append(extras["where"])
if "having" in extras:
templatable_statements.append(extras["having"])
+ if columns := query_obj.get("columns"):
+ calculated_columns: dict[str, Any] = {
+ c.column_name: c.expression for c in self.columns if c.expression
+ }
+ for column_ in columns:
+ if utils.is_adhoc_column(column_):
+ templatable_statements.append(column_["sqlExpression"])
+ elif isinstance(column_, str) and column_ in calculated_columns:
+ templatable_statements.append(calculated_columns[column_])
+ if metrics := query_obj.get("metrics"):
+ metrics_by_name: dict[str, Any] = {
+ m.metric_name: m.expression for m in self.metrics
+ }
+ for metric in metrics:
+ if utils.is_adhoc_metric(metric) and (
+ sql := metric.get("sqlExpression")
+ ):
+ templatable_statements.append(sql)
+ elif isinstance(metric, str) and metric in metrics_by_name:
+ templatable_statements.append(metrics_by_name[metric])
if self.is_rls_supported:
templatable_statements += [
f.clause for f in security_manager.get_rls_filters(self)
@@ -2121,4 +2140,4 @@ class RowLevelSecurityFilter(Model, AuditMixinNullable):
secondary=RLSFilterTables,
backref="row_level_security_filters",
)
- clause = Column(MediumText(), nullable=False)
+ clause = Column(utils.MediumText(), nullable=False)
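Note: has_extra_cache_key_calls now also scans ad-hoc columns and metrics, so Jinja inside a sqlExpression participates in cache-key computation. A hypothetical query object that the new branches would catch:

# Both expressions contain Jinja macros, so the cached result must be
# keyed per user / per URL parameter rather than shared globally.
query_obj = {
    "columns": [
        {
            "expressionType": "SQL",
            "sqlExpression": "'{{ current_username() }}'",
            "label": "user",
        }
    ],
    "metrics": [
        {
            "expressionType": "SQL",
            "sqlExpression": "SUM(num) * {{ url_param('factor', 1) }}",
        }
    ],
}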
diff --git a/superset/constants.py b/superset/constants.py
index d233f271c6569..f6cf2a115754e 100644
--- a/superset/constants.py
+++ b/superset/constants.py
@@ -173,6 +173,7 @@ class RouteMethod: # pylint: disable=too-few-public-methods
"columnar_metadata": "columnar_upload",
"csv_metadata": "csv_upload",
"slack_channels": "write",
+ "put_filters": "write",
}
EXTRA_FORM_DATA_APPEND_KEYS = {
diff --git a/superset/daos/dashboard.py b/superset/daos/dashboard.py
index 8196c197b2487..5199c4a0ca47d 100644
--- a/superset/daos/dashboard.py
+++ b/superset/daos/dashboard.py
@@ -28,6 +28,7 @@
DashboardAccessDeniedError,
DashboardForbiddenError,
DashboardNotFoundError,
+ DashboardUpdateFailedError,
)
from superset.daos.base import BaseDAO
from superset.dashboards.filters import DashboardAccessFilter, is_uuid
@@ -318,6 +319,77 @@ def copy_dashboard(
db.session.add(dash)
return dash
+ @classmethod
+ def update_native_filters_config(
+ cls,
+ dashboard: Dashboard | None = None,
+ attributes: dict[str, Any] | None = None,
+ ) -> list[dict[str, Any]]:
+ if not dashboard:
+ raise DashboardUpdateFailedError("Dashboard not found")
+
+ if attributes:
+ metadata = json.loads(dashboard.json_metadata or "{}")
+ native_filter_configuration = metadata.get(
+ "native_filter_configuration", []
+ )
+ reordered_filter_ids: list[str] = attributes.get("reordered", [])
+ updated_configuration = []
+
+ # Modify / Delete existing filters
+ for conf in native_filter_configuration:
+ deleted_filter = next(
+ (f for f in attributes.get("deleted", []) if f == conf.get("id")),
+ None,
+ )
+ if deleted_filter:
+ continue
+
+ modified_filter = next(
+ (
+ f
+ for f in attributes.get("modified", [])
+ if f.get("id") == conf.get("id")
+ ),
+ None,
+ )
+ if modified_filter:
+ # Filter was modified, substitute it
+ updated_configuration.append(modified_filter)
+ else:
+ # Filter was not modified, keep it as is
+ updated_configuration.append(conf)
+
+ # Append new filters
+ for new_filter in attributes.get("modified", []):
+ new_filter_id = new_filter.get("id")
+ if new_filter_id not in [f.get("id") for f in updated_configuration]:
+ updated_configuration.append(new_filter)
+
+ if (
+ reordered_filter_ids
+ and new_filter_id not in reordered_filter_ids
+ ):
+ reordered_filter_ids.append(new_filter_id)
+
+ # Reorder filters
+ if reordered_filter_ids:
+ filter_map = {
+ filter_config["id"]: filter_config
+ for filter_config in updated_configuration
+ }
+
+ updated_configuration = [
+ filter_map[filter_id]
+ for filter_id in reordered_filter_ids
+ if filter_id in filter_map
+ ]
+
+ metadata["native_filter_configuration"] = updated_configuration
+ dashboard.json_metadata = json.dumps(metadata)
+
+ return updated_configuration
+
@staticmethod
def add_favorite(dashboard: Dashboard) -> None:
ids = DashboardDAO.favorited_ids([dashboard])
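Note: a worked example of the merge semantics in update_native_filters_config, using hypothetical filter IDs. Starting from existing filters A and B: B is deleted, A is substituted with its modified version, C is appended as new, and the final order is taken from "reordered":

attributes = {
    "deleted": ["B"],
    "modified": [
        {"id": "A", "name": "Region (renamed)"},  # existing: substituted
        {"id": "C", "name": "Country"},           # not present yet: appended
    ],
    "reordered": ["C", "A"],
}
# native_filter_configuration [A, B] -> updated configuration [C, A]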
diff --git a/superset/dashboards/api.py b/superset/dashboards/api.py
index cb7e30ef02838..a752091cc18e6 100644
--- a/superset/dashboards/api.py
+++ b/superset/dashboards/api.py
@@ -47,6 +47,7 @@
DashboardDeleteFailedError,
DashboardForbiddenError,
DashboardInvalidError,
+ DashboardNativeFiltersUpdateFailedError,
DashboardNotFoundError,
DashboardUpdateFailedError,
)
@@ -55,7 +56,10 @@
from superset.commands.dashboard.importers.dispatcher import ImportDashboardsCommand
from superset.commands.dashboard.permalink.create import CreateDashboardPermalinkCommand
from superset.commands.dashboard.unfave import DelFavoriteDashboardCommand
-from superset.commands.dashboard.update import UpdateDashboardCommand
+from superset.commands.dashboard.update import (
+ UpdateDashboardCommand,
+ UpdateDashboardNativeFiltersCommand,
+)
from superset.commands.database.exceptions import DatasetValidationError
from superset.commands.exceptions import TagForbiddenError
from superset.commands.importers.exceptions import NoValidFilesFoundError
@@ -80,6 +84,7 @@
DashboardCopySchema,
DashboardDatasetSchema,
DashboardGetResponseSchema,
+ DashboardNativeFiltersConfigUpdateSchema,
DashboardPostSchema,
DashboardPutSchema,
EmbeddedDashboardConfigSchema,
@@ -151,12 +156,18 @@ def wraps(self: BaseSupersetModelRestApi, id_or_slug: str) -> Response:
class DashboardRestApi(BaseSupersetModelRestApi):
datamodel = SQLAInterface(Dashboard)
- @before_request(only=["thumbnail"])
+ @before_request(only=["thumbnail", "cache_dashboard_screenshot", "screenshot"])
def ensure_thumbnails_enabled(self) -> Optional[Response]:
if not is_feature_enabled("THUMBNAILS"):
return self.response_404()
return None
+ @before_request(only=["cache_dashboard_screenshot", "screenshot"])
+ def ensure_screenshots_enabled(self) -> Optional[Response]:
+ if not is_feature_enabled("ENABLE_DASHBOARD_SCREENSHOT_ENDPOINTS"):
+ return self.response_404()
+ return None
+
include_route_methods = RouteMethod.REST_MODEL_VIEW_CRUD_SET | {
RouteMethod.EXPORT,
RouteMethod.IMPORT,
@@ -175,6 +186,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]:
"copy_dash",
"cache_dashboard_screenshot",
"screenshot",
+ "put_filters",
}
resource_name = "dashboard"
allow_browser_login = True
@@ -262,6 +274,7 @@ def ensure_thumbnails_enabled(self) -> Optional[Response]:
add_model_schema = DashboardPostSchema()
edit_model_schema = DashboardPutSchema()
+ update_filters_model_schema = DashboardNativeFiltersConfigUpdateSchema()
chart_entity_response_schema = ChartEntityResponseSchema()
dashboard_get_response_schema = DashboardGetResponseSchema()
dashboard_dataset_schema = DashboardDatasetSchema()
@@ -675,6 +688,85 @@ def put(self, pk: int) -> Response:
response = self.response_422(message=str(ex))
return response
+ @expose("//filters", methods=("PUT",))
+ @protect()
+ @safe
+ @statsd_metrics
+ @event_logger.log_this_with_context(
+ action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.put_filters",
+ log_to_statsd=False,
+ )
+ @requires_json
+ def put_filters(self, pk: int) -> Response:
+ """
+ Modify native filters configuration for a dashboard.
+ ---
+ put:
+ summary: Update native filters configuration for a dashboard.
+ parameters:
+ - in: path
+ schema:
+ type: integer
+ name: pk
+ requestBody:
+ description: Native filters configuration
+ required: true
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/DashboardNativeFiltersConfigUpdateSchema'
+ responses:
+ 200:
+ description: Dashboard native filters updated
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ result:
+ type: array
+ 400:
+ $ref: '#/components/responses/400'
+ 401:
+ $ref: '#/components/responses/401'
+ 403:
+ $ref: '#/components/responses/403'
+ 404:
+ $ref: '#/components/responses/404'
+ 422:
+ $ref: '#/components/responses/422'
+ 500:
+ $ref: '#/components/responses/500'
+ """
+ try:
+ item = self.update_filters_model_schema.load(request.json, partial=True)
+ except ValidationError as error:
+ return self.response_400(message=error.messages)
+
+ try:
+ configuration = UpdateDashboardNativeFiltersCommand(pk, item).run()
+ response = self.response(
+ 200,
+ result=configuration,
+ )
+ except DashboardNotFoundError:
+ response = self.response_404()
+ except DashboardForbiddenError:
+ response = self.response_403()
+ except TagForbiddenError as ex:
+ response = self.response(403, message=str(ex))
+ except DashboardInvalidError as ex:
+ return self.response_422(message=ex.normalized_messages())
+ except DashboardNativeFiltersUpdateFailedError as ex:
+ logger.error(
+ "Error changing native filters for dashboard %s: %s",
+ self.__class__.__name__,
+ str(ex),
+ exc_info=True,
+ )
+ response = self.response_422(message=str(ex))
+ return response
+
@expose("/", methods=("DELETE",))
@protect()
@safe
@@ -1039,13 +1131,15 @@ def trigger_celery() -> WerkzeugResponse:
logger.info("Triggering screenshot ASYNC")
cache_dashboard_screenshot.delay(
username=get_current_user(),
- guest_token=g.user.guest_token
- if get_current_user() and isinstance(g.user, GuestUser)
- else None,
+ guest_token=(
+ g.user.guest_token
+ if get_current_user() and isinstance(g.user, GuestUser)
+ else None
+ ),
dashboard_id=dashboard.id,
dashboard_url=dashboard_url,
cache_key=cache_key,
- force=True,
+ force=False,
thumb_size=thumb_size,
window_size=window_size,
)
@@ -1509,15 +1603,16 @@ def set_embedded(self, dashboard: Dashboard) -> Response:
try:
body = self.embedded_config_schema.load(request.json)
- with db.session.begin_nested():
- embedded = EmbeddedDashboardDAO.upsert(
- dashboard,
- body["allowed_domains"],
- )
+ embedded = EmbeddedDashboardDAO.upsert(
+ dashboard,
+ body["allowed_domains"],
+ )
+ db.session.commit() # pylint: disable=consider-using-transaction
result = self.embedded_response_schema.dump(embedded)
return self.response(200, result=result)
except ValidationError as error:
+ db.session.rollback() # pylint: disable=consider-using-transaction
return self.response_400(message=error.messages)
@expose("//embedded", methods=("DELETE",))
diff --git a/superset/dashboards/schemas.py b/superset/dashboards/schemas.py
index d63e79336c377..5edcda68e34b3 100644
--- a/superset/dashboards/schemas.py
+++ b/superset/dashboards/schemas.py
@@ -399,6 +399,12 @@ class DashboardPutSchema(BaseDashboardSchema):
)
+class DashboardNativeFiltersConfigUpdateSchema(BaseDashboardSchema):
+ deleted = fields.List(fields.String(), allow_none=False)
+ modified = fields.List(fields.Raw(), allow_none=False)
+ reordered = fields.List(fields.String(), allow_none=False)
+
+
class DashboardScreenshotPostSchema(Schema):
dataMask = fields.Dict(
keys=fields.Str(),
diff --git a/superset/databases/api.py b/superset/databases/api.py
index 88188bed57b90..542daa93ae668 100644
--- a/superset/databases/api.py
+++ b/superset/databases/api.py
@@ -121,6 +121,7 @@
from superset.superset_typing import FlaskResponse
from superset.utils import json
from superset.utils.core import error_msg_from_exception, parse_js_uri_path_item
+from superset.utils.decorators import transaction
from superset.utils.oauth2 import decode_oauth2_state
from superset.utils.ssh_tunnel import mask_password_info
from superset.views.base_api import (
@@ -1341,6 +1342,7 @@ def validate_sql(self, pk: int) -> FlaskResponse:
return self.response_404()
@expose("/oauth2/", methods=["GET"])
+ @transaction()
@event_logger.log_this_with_context(
action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.oauth2",
log_to_statsd=True,
@@ -1428,7 +1430,6 @@ def oauth2(self) -> FlaskResponse:
"refresh_token": token_response.get("refresh_token"),
},
)
-
# return blank page that closes itself
return make_response(
render_template("superset/oauth2.html", tab_id=state["tab_id"]),
diff --git a/superset/databases/schemas.py b/superset/databases/schemas.py
index 27eb043eb131b..ed4e67d3041ea 100644
--- a/superset/databases/schemas.py
+++ b/superset/databases/schemas.py
@@ -47,6 +47,11 @@
SSHTunnelMissingCredentials,
)
from superset.constants import PASSWORD_MASK
+from superset.databases.types import ( # pylint:disable=unused-import
+ EncryptedDict, # noqa: F401
+ EncryptedField,
+ EncryptedString, # noqa: F401
+)
from superset.databases.utils import make_url_safe
from superset.db_engine_specs import get_engine_spec
from superset.exceptions import CertificateException, SupersetSecurityException
@@ -941,20 +946,6 @@ def validate_ssh_tunnel_credentials(
return
-class EncryptedField: # pylint: disable=too-few-public-methods
- """
- A database field that should be stored in encrypted_extra.
- """
-
-
-class EncryptedString(EncryptedField, fields.String):
- pass
-
-
-class EncryptedDict(EncryptedField, fields.Dict):
- pass
-
-
def encrypted_field_properties(self, field: Any, **_) -> dict[str, Any]: # type: ignore
ret = {}
if isinstance(field, EncryptedField):
diff --git a/superset/databases/types.py b/superset/databases/types.py
new file mode 100644
index 0000000000000..4ab4428604d93
--- /dev/null
+++ b/superset/databases/types.py
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# These fields were moved out of schemas.py so that they can be
+# imported without requiring an application context.
+from marshmallow import fields
+
+
+class EncryptedField: # pylint: disable=too-few-public-methods
+ """
+ A database field that should be stored in encrypted_extra.
+ """
+
+
+class EncryptedString(EncryptedField, fields.String):
+ pass
+
+
+class EncryptedDict(EncryptedField, fields.Dict):
+ pass
diff --git a/superset/datasets/api.py b/superset/datasets/api.py
index f8f6bdc0b9604..762727aafcdff 100644
--- a/superset/datasets/api.py
+++ b/superset/datasets/api.py
@@ -15,16 +15,24 @@
# specific language governing permissions and limitations
# under the License.
# pylint: disable=too-many-lines
+from __future__ import annotations
+
import logging
from datetime import datetime
from io import BytesIO
-from typing import Any
+from typing import Any, Callable
from zipfile import is_zipfile, ZipFile
from flask import request, Response, send_file
from flask_appbuilder.api import expose, protect, rison, safe
+from flask_appbuilder.api.schemas import get_item_schema
+from flask_appbuilder.const import (
+ API_RESULT_RES_KEY,
+ API_SELECT_COLUMNS_RIS_KEY,
+)
from flask_appbuilder.models.sqla.interface import SQLAInterface
from flask_babel import ngettext
+from jinja2.exceptions import TemplateSyntaxError
from marshmallow import ValidationError
from superset import event_logger
@@ -65,6 +73,8 @@
GetOrCreateDatasetSchema,
openapi_spec_methods_override,
)
+from superset.exceptions import SupersetTemplateException
+from superset.jinja_context import BaseTemplateProcessor, get_template_processor
from superset.utils import json
from superset.utils.core import parse_boolean_string
from superset.views.base import DatasourceFilter
@@ -75,6 +85,7 @@
requires_json,
statsd_metrics,
)
+from superset.views.error_handling import handle_api_exception
from superset.views.filters import BaseFilterRelatedUsers, FilterRelatedOwners
logger = logging.getLogger(__name__)
@@ -1056,3 +1067,140 @@ def warm_up_cache(self) -> Response:
return self.response(200, result=result)
except CommandException as ex:
return self.response(ex.status, message=ex.message)
+
+ @expose("/", methods=("GET",))
+ @protect()
+ @safe
+ @rison(get_item_schema)
+ @statsd_metrics
+ @handle_api_exception
+ @event_logger.log_this_with_context(
+ action=lambda self, *args, **kwargs: f"{self.__class__.__name__}.get",
+ log_to_statsd=False,
+ )
+ def get(self, pk: int, **kwargs: Any) -> Response:
+ """Get a dataset.
+ ---
+ get:
+ summary: Get a dataset
+ description: Get a dataset by ID
+ parameters:
+ - in: path
+ schema:
+ type: integer
+ description: The dataset ID
+ name: pk
+ - in: query
+ name: q
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/get_item_schema'
+ - in: query
+ name: include_rendered_sql
+ description: >-
+ Should Jinja macros from sql, metrics and columns be rendered
+ and included in the response
+ schema:
+ type: boolean
+ responses:
+ 200:
+ description: Dataset object has been returned.
+ content:
+ application/json:
+ schema:
+ type: object
+ properties:
+ id:
+ description: The item id
+ type: string
+ result:
+ $ref: '#/components/schemas/{{self.__class__.__name__}}.get'
+ 400:
+ $ref: '#/components/responses/400'
+ 401:
+ $ref: '#/components/responses/401'
+ 422:
+ $ref: '#/components/responses/422'
+ 500:
+ $ref: '#/components/responses/500'
+ """
+ item: SqlaTable | None = self.datamodel.get(
+ pk,
+ self._base_filters,
+ self.show_select_columns,
+ self.show_outer_default_load,
+ )
+ if not item:
+ return self.response_404()
+
+ response: dict[str, Any] = {}
+ args = kwargs.get("rison", {})
+ select_cols = args.get(API_SELECT_COLUMNS_RIS_KEY, [])
+ pruned_select_cols = [col for col in select_cols if col in self.show_columns]
+ self.set_response_key_mappings(
+ response,
+ self.get,
+ args,
+ **{API_SELECT_COLUMNS_RIS_KEY: pruned_select_cols},
+ )
+ if pruned_select_cols:
+ show_model_schema = self.model2schemaconverter.convert(pruned_select_cols)
+ else:
+ show_model_schema = self.show_model_schema
+
+ response["id"] = pk
+ response[API_RESULT_RES_KEY] = show_model_schema.dump(item, many=False)
+
+ if parse_boolean_string(request.args.get("include_rendered_sql")):
+ try:
+ processor = get_template_processor(database=item.database)
+ response["result"] = self.render_dataset_fields(
+ response["result"], processor
+ )
+ except SupersetTemplateException as ex:
+ return self.response_400(message=str(ex))
+ return self.response(200, **response)
+
+ @staticmethod
+ def render_dataset_fields(
+ data: dict[str, Any], processor: BaseTemplateProcessor
+ ) -> dict[str, Any]:
+ """
+ Renders Jinja macros in the ``sql``, ``metrics`` and ``columns`` fields.
+
+ :param data: Dataset info to be rendered
+ :param processor: A ``TemplateProcessor`` instance
+ :return: Rendered dataset data
+ """
+
+ def render_item_list(item_list: list[dict[str, Any]]) -> list[dict[str, Any]]:
+ return [
+ {
+ **item,
+ "rendered_expression": processor.process_template(
+ item["expression"]
+ ),
+ }
+ if item.get("expression")
+ else item
+ for item in item_list
+ ]
+
+ items: list[tuple[str, str, str, Callable[[Any], Any]]] = [
+ ("query", "sql", "rendered_sql", processor.process_template),
+ ("metric", "metrics", "metrics", render_item_list),
+ ("calculated column", "columns", "columns", render_item_list),
+ ]
+ for item_type, key, new_key, func in items:
+ if not data.get(key):
+ continue
+
+ try:
+ data[new_key] = func(data[key])
+ except TemplateSyntaxError as ex:
+ raise SupersetTemplateException(
+ f"Unable to render expression from dataset {item_type}.",
+ ) from ex
+
+ return data
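Note: a sketch of fetching a dataset with its Jinja rendered (dataset ID, host, and token are illustrative):

import requests

resp = requests.get(
    "http://localhost:8088/api/v1/dataset/7",
    params={"include_rendered_sql": "true"},
    headers={"Authorization": "Bearer <JWT>"},
)
result = resp.json()["result"]
# For virtual datasets, result["rendered_sql"] holds the processed SQL;
# metric and calculated-column entries gain a "rendered_expression" key.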
diff --git a/superset/datasets/schemas.py b/superset/datasets/schemas.py
index 5b899d8402f23..4b7e92d7ff5bb 100644
--- a/superset/datasets/schemas.py
+++ b/superset/datasets/schemas.py
@@ -29,7 +29,6 @@
get_export_ids_schema = {"type": "array", "items": {"type": "integer"}}
openapi_spec_methods_override = {
- "get": {"get": {"summary": "Get a dataset detail information"}},
"get_list": {
"get": {
"summary": "Get a list of datasets",
diff --git a/superset/db_engine_specs/base.py b/superset/db_engine_specs/base.py
index dcdfff6c3f242..8cabb1e5893ea 100644
--- a/superset/db_engine_specs/base.py
+++ b/superset/db_engine_specs/base.py
@@ -58,8 +58,8 @@
from sqlalchemy.types import TypeEngine
from sqlparse.tokens import CTE
-from superset import sql_parse
-from superset.constants import TimeGrain as TimeGrainConstants
+from superset import db, sql_parse
+from superset.constants import QUERY_CANCEL_KEY, TimeGrain as TimeGrainConstants
from superset.databases.utils import get_table_metadata, make_url_safe
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
from superset.exceptions import DisallowedSQLFunction, OAuth2Error, OAuth2RedirectError
@@ -433,10 +433,19 @@ class BaseEngineSpec: # pylint: disable=too-many-public-methods
oauth2_scope = ""
oauth2_authorization_request_uri: str | None = None # pylint: disable=invalid-name
oauth2_token_request_uri: str | None = None
+ oauth2_token_request_type = "data"
# Driver-specific exception that should be mapped to OAuth2RedirectError
oauth2_exception = OAuth2RedirectError
+ # Is the query ID tied to the connection?
+ # The default is True, meaning the query ID is determined when the
+ # connection is created.
+ # When a DB engine spec sets this to False, the query ID is only known
+ # after the specific query is executed, and `execute_with_cursor` updates
+ # the `cancel_query` value in the `extra` field of the `query` object.
+ has_query_id_before_execute = True
+
@classmethod
def is_oauth2_enabled(cls) -> bool:
return (
@@ -517,6 +526,9 @@ def get_oauth2_config(cls) -> OAuth2ClientConfig | None:
"token_request_uri",
cls.oauth2_token_request_uri,
),
+ "request_content_type": db_engine_spec_config.get(
+ "request_content_type", cls.oauth2_token_request_type
+ ),
}
return config
@@ -554,18 +566,16 @@ def get_oauth2_token(
"""
timeout = current_app.config["DATABASE_OAUTH2_TIMEOUT"].total_seconds()
uri = config["token_request_uri"]
- response = requests.post(
- uri,
- json={
- "code": code,
- "client_id": config["id"],
- "client_secret": config["secret"],
- "redirect_uri": config["redirect_uri"],
- "grant_type": "authorization_code",
- },
- timeout=timeout,
- )
- return response.json()
+ req_body = {
+ "code": code,
+ "client_id": config["id"],
+ "client_secret": config["secret"],
+ "redirect_uri": config["redirect_uri"],
+ "grant_type": "authorization_code",
+ }
+ if config["request_content_type"] == "data":
+ return requests.post(uri, data=req_body, timeout=timeout).json()
+ return requests.post(uri, json=req_body, timeout=timeout).json()
@classmethod
def get_oauth2_fresh_token(
@@ -578,17 +588,15 @@ def get_oauth2_fresh_token(
"""
timeout = current_app.config["DATABASE_OAUTH2_TIMEOUT"].total_seconds()
uri = config["token_request_uri"]
- response = requests.post(
- uri,
- json={
- "client_id": config["id"],
- "client_secret": config["secret"],
- "refresh_token": refresh_token,
- "grant_type": "refresh_token",
- },
- timeout=timeout,
- )
- return response.json()
+ req_body = {
+ "client_id": config["id"],
+ "client_secret": config["secret"],
+ "refresh_token": refresh_token,
+ "grant_type": "refresh_token",
+ }
+ if config["request_content_type"] == "data":
+ return requests.post(uri, data=req_body, timeout=timeout).json()
+ return requests.post(uri, json=req_body, timeout=timeout).json()
@classmethod
def get_allows_alias_in_select(
@@ -1316,6 +1324,7 @@ def handle_cursor(cls, cursor: Any, query: Query) -> None:
# TODO: Fix circular import error caused by importing sql_lab.Query
@classmethod
+ # pylint: disable=consider-using-transaction
def execute_with_cursor(
cls,
cursor: Any,
@@ -1333,6 +1342,13 @@ def execute_with_cursor(
"""
logger.debug("Query %d: Running query: %s", query.id, sql)
cls.execute(cursor, sql, query.database, async_=True)
+ if not cls.has_query_id_before_execute:
+ cancel_query_id = query.database.db_engine_spec.get_cancel_query_id(
+ cursor, query
+ )
+ if cancel_query_id is not None:
+ query.set_extra_json_key(QUERY_CANCEL_KEY, cancel_query_id)
+ db.session.commit()
logger.debug("Query %d: Handling cursor", query.id)
cls.handle_cursor(cursor, query)
@@ -1691,10 +1707,13 @@ def select_star( # pylint: disable=too-many-arguments
return sql
@classmethod
- def estimate_statement_cost(cls, statement: str, cursor: Any) -> dict[str, Any]:
+ def estimate_statement_cost(
+ cls, database: Database, statement: str, cursor: Any
+ ) -> dict[str, Any]:
"""
Generate a SQL query that estimates the cost of a given statement.
+ :param database: A Database object
:param statement: A single SQL statement
:param cursor: Cursor instance
:return: Dictionary with different costs
@@ -1765,6 +1784,7 @@ def estimate_query_cost( # pylint: disable=too-many-arguments
cursor = conn.cursor()
return [
cls.estimate_statement_cost(
+ database,
cls.process_statement(statement, database),
cursor,
)
@@ -1793,8 +1813,9 @@ def get_url_for_impersonation(
return url
@classmethod
- def update_impersonation_config(
+ def update_impersonation_config( # pylint: disable=too-many-arguments
cls,
+ database: Database,
connect_args: dict[str, Any],
uri: str,
username: str | None,
@@ -1804,6 +1825,7 @@ def update_impersonation_config(
Update a configuration dictionary
that can set the correct properties for impersonating users
+ :param database: a Database object
:param connect_args: config to be updated
:param uri: URI
:param username: Effective username
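Note: the new request_content_type option selects between form-encoded ("data") and JSON ("json") token requests. A hedged configuration sketch, assuming the DATABASE_OAUTH2_CLIENTS layout with illustrative endpoints and credentials:

# superset_config.py (sketch)
DATABASE_OAUTH2_CLIENTS = {
    "Trino": {
        "id": "my-client-id",
        "secret": "my-client-secret",
        "authorization_request_uri": "https://idp.example.com/authorize",
        "token_request_uri": "https://idp.example.com/token",
        # Send the token request as form data (the default) or as JSON.
        "request_content_type": "data",
    },
}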
diff --git a/superset/db_engine_specs/bigquery.py b/superset/db_engine_specs/bigquery.py
index 11175d7957445..70bc4bc845390 100644
--- a/superset/db_engine_specs/bigquery.py
+++ b/superset/db_engine_specs/bigquery.py
@@ -409,7 +409,11 @@ def df_to_sql(
pandas_gbq.to_gbq(df, **to_gbq_kwargs)
@classmethod
- def _get_client(cls, engine: Engine) -> bigquery.Client:
+ def _get_client(
+ cls,
+ engine: Engine,
+ database: Database, # pylint: disable=unused-argument
+ ) -> bigquery.Client:
"""
Return the BigQuery client associated with an engine.
"""
@@ -453,7 +457,7 @@ def estimate_query_cost( # pylint: disable=too-many-arguments
catalog=catalog,
schema=schema,
) as engine:
- client = cls._get_client(engine)
+ client = cls._get_client(engine, database)
return [
cls.custom_estimate_statement_cost(
cls.process_statement(statement, database),
@@ -477,7 +481,7 @@ def get_default_catalog(cls, database: Database) -> str | None:
return project
with database.get_sqla_engine() as engine:
- client = cls._get_client(engine)
+ client = cls._get_client(engine, database)
return client.project
@classmethod
@@ -493,7 +497,7 @@ def get_catalog_names(
"""
engine: Engine
with database.get_sqla_engine() as engine:
- client = cls._get_client(engine)
+ client = cls._get_client(engine, database)
projects = client.list_projects()
return {project.project_id for project in projects}
diff --git a/superset/db_engine_specs/denodo.py b/superset/db_engine_specs/denodo.py
new file mode 100644
index 0000000000000..2de528260f864
--- /dev/null
+++ b/superset/db_engine_specs/denodo.py
@@ -0,0 +1,158 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import re
+from datetime import datetime
+from typing import Any, Optional
+
+from sqlalchemy.types import Date, DateTime
+
+from superset.db_engine_specs.base import BaseEngineSpec, BasicParametersMixin
+from superset.errors import SupersetErrorType
+
+
+# Internal class for defining error message patterns (for translation)
+class _ErrorPatterns: # pylint: disable=too-few-public-methods
+ CONN_INVALID_USER_PWD_REGEX = re.compile("The username or password is incorrect")
+ CONN_INVALID_PWD_NEEDED_REGEX = re.compile("no password supplied")
+ CONN_INVALID_HOSTNAME_REGEX = re.compile(
+ 'could not translate host name "(?P<hostname>.*?)" to address: '
+ )
+ CONN_PORT_CLOSED_REGEX = re.compile(
+ "Is the server running on that host and accepting"
+ )
+ CONN_UNKNOWN_DATABASE_REGEX = re.compile("Database '(?P<database>.*?)' not found")
+ CONN_FORBIDDEN_DATABASE_REGEX = re.compile(
+ "Insufficient privileges to connect to the database '(?P.*?)'"
+ )
+ QUERY_SYNTAX_ERROR_REGEX = re.compile("Exception parsing query near '(?P<err>.*?)'")
+ QUERY_COLUMN_NOT_EXIST_REGEX = re.compile(
+ "Field not found '(?P.*?)' in view '(?P.*?)'"
+ )
+ QUERY_GROUPBY_ERROR_REGEX = re.compile(
+ "Error computing capabilities of GROUP BY view"
+ )
+ QUERY_GROUPBY_CANT_PROJ_REGEX = re.compile(
+ "Invalid GROUP BY expression. '(?P.*?)' cannot be projected"
+ )
+
+
+class DenodoEngineSpec(BaseEngineSpec, BasicParametersMixin):
+ engine = "denodo"
+ engine_name = "Denodo"
+
+ default_driver = "psycopg2"
+ sqlalchemy_uri_placeholder = (
+ "denodo://user:password@host:port/dbname[?key=value&key=value...]"
+ )
+ encryption_parameters = {"sslmode": "require"}
+
+ _time_grain_expressions = {
+ None: "{col}",
+ "PT1M": "TRUNC({col},'MI')",
+ "PT1H": "TRUNC({col},'HH')",
+ "P1D": "TRUNC({col},'DDD')",
+ "P1W": "TRUNC({col},'W')",
+ "P1M": "TRUNC({col},'MONTH')",
+ "P3M": "TRUNC({col},'Q')",
+ "P1Y": "TRUNC({col},'YEAR')",
+ }
+
+ custom_errors: dict[
+ re.Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]
+ ] = {
+ _ErrorPatterns.CONN_INVALID_USER_PWD_REGEX: (
+ "Incorrect username or password.",
+ SupersetErrorType.CONNECTION_INVALID_USERNAME_ERROR,
+ {"invalid": ["username", "password"]},
+ ),
+ _ErrorPatterns.CONN_INVALID_PWD_NEEDED_REGEX: (
+ "Please enter a password.",
+ SupersetErrorType.CONNECTION_ACCESS_DENIED_ERROR,
+ {"invalid": ["password"]},
+ ),
+ _ErrorPatterns.CONN_INVALID_HOSTNAME_REGEX: (
+ 'Hostname "%(hostname)s" cannot be resolved.',
+ SupersetErrorType.CONNECTION_INVALID_HOSTNAME_ERROR,
+ {"invalid": ["host"]},
+ ),
+ _ErrorPatterns.CONN_PORT_CLOSED_REGEX: (
+ "Server refused the connection: check hostname and port.",
+ SupersetErrorType.CONNECTION_PORT_CLOSED_ERROR,
+ {"invalid": ["host", "port"]},
+ ),
+ _ErrorPatterns.CONN_UNKNOWN_DATABASE_REGEX: (
+ 'Unable to connect to database "%(database)s"',
+ SupersetErrorType.CONNECTION_UNKNOWN_DATABASE_ERROR,
+ {"invalid": ["database"]},
+ ),
+ _ErrorPatterns.CONN_FORBIDDEN_DATABASE_REGEX: (
+ 'Unable to connect to database "%(database)s": database does not '
+ "exist or insufficient permissions",
+ SupersetErrorType.CONNECTION_DATABASE_PERMISSIONS_ERROR,
+ {"invalid": ["database"]},
+ ),
+ _ErrorPatterns.QUERY_SYNTAX_ERROR_REGEX: (
+ "Please check your query for syntax errors at or "
+ 'near "%(err)s". Then, try running your query again.',
+ SupersetErrorType.SYNTAX_ERROR,
+ {},
+ ),
+ _ErrorPatterns.QUERY_COLUMN_NOT_EXIST_REGEX: (
+ 'Column "%(column)s" not found in "%(view)s".',
+ SupersetErrorType.COLUMN_DOES_NOT_EXIST_ERROR,
+ {},
+ ),
+ _ErrorPatterns.QUERY_GROUPBY_ERROR_REGEX: (
+ "Invalid aggregation expression.",
+ SupersetErrorType.SYNTAX_ERROR,
+ {},
+ ),
+ _ErrorPatterns.QUERY_GROUPBY_CANT_PROJ_REGEX: (
+ '"%(exp)s" is neither an aggregation function nor '
+ "appears in the GROUP BY clause.",
+ SupersetErrorType.SYNTAX_ERROR,
+ {},
+ ),
+ }
+
+ @classmethod
+ def epoch_to_dttm(cls) -> str:
+ return "GETTIMEFROMMILLIS({col})"
+
+ @classmethod
+ def convert_dttm(
+ cls, target_type: str, dttm: datetime, db_extra: Optional[dict[str, Any]] = None
+ ) -> Optional[str]:
+ sqla_type = cls.get_sqla_column_type(target_type)
+ if isinstance(sqla_type, Date):
+ return f"TO_DATE('yyyy-MM-dd', '{dttm.date().isoformat()}')"
+ if isinstance(sqla_type, DateTime):
+ dttm_formatted = dttm.isoformat(sep=" ", timespec="milliseconds")
+ return f"TO_TIMESTAMP('yyyy-MM-dd HH:mm:ss.SSS', '{dttm_formatted}')"
+ return None
+
+ @classmethod
+ def get_datatype(cls, type_code: Any) -> Optional[str]:
+ # pylint: disable=import-outside-toplevel
+ from psycopg2.extensions import binary_types, string_types
+
+ # Obtain data type names from psycopg2
+ types = binary_types.copy()
+ types.update(string_types)
+ if type_code in types:
+ return types[type_code].name
+ return None
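Note: a short sketch of the Denodo date/time literal conversion above (assuming DATE and TIMESTAMP resolve to Date and DateTime SQLAlchemy types, as in the base spec):

from datetime import datetime
from superset.db_engine_specs.denodo import DenodoEngineSpec

dttm = datetime(2024, 1, 15, 9, 30, 0)
print(DenodoEngineSpec.convert_dttm("DATE", dttm))
# TO_DATE('yyyy-MM-dd', '2024-01-15')
print(DenodoEngineSpec.convert_dttm("TIMESTAMP", dttm))
# TO_TIMESTAMP('yyyy-MM-dd HH:mm:ss.SSS', '2024-01-15 09:30:00.000')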
diff --git a/superset/db_engine_specs/hive.py b/superset/db_engine_specs/hive.py
index e3cf128b7a2c6..6288866db93ee 100644
--- a/superset/db_engine_specs/hive.py
+++ b/superset/db_engine_specs/hive.py
@@ -537,8 +537,9 @@ def get_url_for_impersonation(
return url
@classmethod
- def update_impersonation_config(
+ def update_impersonation_config( # pylint: disable=too-many-arguments
cls,
+ database: Database,
connect_args: dict[str, Any],
uri: str,
username: str | None,
@@ -547,6 +548,7 @@ def update_impersonation_config(
"""
Update a configuration dictionary
that can set the correct properties for impersonating users
+ :param database: the Database object
:param connect_args:
:param uri: URI string
:param impersonate_user: Flag indicating if impersonation is enabled
diff --git a/superset/db_engine_specs/impala.py b/superset/db_engine_specs/impala.py
index ea74df83164f0..ce34ae5648f20 100644
--- a/superset/db_engine_specs/impala.py
+++ b/superset/db_engine_specs/impala.py
@@ -21,8 +21,9 @@
import re
import time
from datetime import datetime
-from typing import Any, TYPE_CHECKING
+from typing import Any, Optional, TYPE_CHECKING
+import requests
from flask import current_app
from sqlalchemy import types
from sqlalchemy.engine.reflection import Inspector
@@ -57,6 +58,8 @@ class ImpalaEngineSpec(BaseEngineSpec):
TimeGrain.YEAR: "TRUNC({col}, 'YYYY')",
}
+ has_query_id_before_execute = False
+
@classmethod
def epoch_to_dttm(cls) -> str:
return "from_unixtime({col})"
@@ -91,7 +94,7 @@ def has_implicit_cancel(cls) -> bool:
:see: handle_cursor
"""
- return True
+ return False
@classmethod
def execute(
@@ -160,3 +163,38 @@ def handle_cursor(cls, cursor: Any, query: Query) -> None:
except Exception: # pylint: disable=broad-except
logger.debug("Call to status() failed ")
return
+
+ @classmethod
+ def get_cancel_query_id(cls, cursor: Any, query: Query) -> Optional[str]:
+ """
+ Get Impala Query ID that will be used to cancel the running
+ queries to release impala resources.
+
+ :param cursor: Cursor instance in which the query will be executed
+ :param query: Query instance
+ :return: Impala Query ID
+ """
+ last_operation = getattr(cursor, "_last_operation", None)
+ if not last_operation:
+ return None
+ guid = last_operation.handle.operationId.guid[::-1].hex()
+ return f"{guid[-16:]}:{guid[:16]}"
+
+ @classmethod
+ def cancel_query(cls, cursor: Any, query: Query, cancel_query_id: str) -> bool:
+ """
+ Cancel query in the underlying database.
+
+ :param cursor: New cursor instance to the db of the query
+ :param query: Query instance
+ :param cancel_query_id: Impala query ID of the query to cancel
+ :return: True if query cancelled successfully, False otherwise
+ """
+ try:
+ impala_host = query.database.url_object.host
+ url = f"http://{impala_host}:25000/cancel_query?query_id={cancel_query_id}"
+ response = requests.post(url, timeout=3)
+ except Exception: # pylint: disable=broad-except
+ return False
+
+ return bool(response and response.status_code == 200)
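Note: the Impala query ID is built by reversing the 16-byte operation GUID, hex-encoding it, and swapping the two halves. A worked sketch with a made-up GUID:

guid_bytes = bytes(range(16))           # hypothetical operation GUID
guid = guid_bytes[::-1].hex()           # reverse the bytes, then hex-encode
query_id = f"{guid[-16:]}:{guid[:16]}"  # low half : high half
print(query_id)  # 0706050403020100:0f0e0d0c0b0a0908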
diff --git a/superset/db_engine_specs/mssql.py b/superset/db_engine_specs/mssql.py
index 464f6cf2b9c8d..5b8ba457bafa3 100644
--- a/superset/db_engine_specs/mssql.py
+++ b/superset/db_engine_specs/mssql.py
@@ -14,6 +14,8 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+from __future__ import annotations
+
import logging
import re
from datetime import datetime
@@ -27,6 +29,7 @@
from superset.constants import TimeGrain
from superset.db_engine_specs.base import BaseEngineSpec, LimitMethod
from superset.errors import SupersetErrorType
+from superset.models.sql_types.mssql_sql_types import GUID
from superset.utils.core import GenericDataType
logger = logging.getLogger(__name__)
@@ -87,6 +90,11 @@ class MssqlEngineSpec(BaseEngineSpec):
SMALLDATETIME(),
GenericDataType.TEMPORAL,
),
+ (
+ re.compile(r"^uniqueidentifier.*", re.IGNORECASE),
+ GUID(),
+ GenericDataType.STRING,
+ ),
)
custom_errors: dict[Pattern[str], tuple[str, SupersetErrorType, dict[str, Any]]] = {
diff --git a/superset/db_engine_specs/postgres.py b/superset/db_engine_specs/postgres.py
index 70373927d521b..6281c6b3b0ff3 100644
--- a/superset/db_engine_specs/postgres.py
+++ b/superset/db_engine_specs/postgres.py
@@ -351,7 +351,16 @@ def get_allow_cost_estimate(cls, extra: dict[str, Any]) -> bool:
return True
@classmethod
- def estimate_statement_cost(cls, statement: str, cursor: Any) -> dict[str, Any]:
+ def estimate_statement_cost(
+ cls, database: Database, statement: str, cursor: Any
+ ) -> dict[str, Any]:
+ """
+ Run a SQL query that estimates the cost of a given statement.
+ :param database: A Database object
+ :param statement: A single SQL statement
+ :param cursor: Cursor instance
+ :return: Dictionary with different costs
+ """
sql = f"EXPLAIN {statement}"
cursor.execute(sql)
diff --git a/superset/db_engine_specs/presto.py b/superset/db_engine_specs/presto.py
index f0664564f872c..df5e1c643fa1f 100644
--- a/superset/db_engine_specs/presto.py
+++ b/superset/db_engine_specs/presto.py
@@ -365,9 +365,12 @@ def get_schema_from_engine_params(
return parse.unquote(database.split("/")[1])
@classmethod
- def estimate_statement_cost(cls, statement: str, cursor: Any) -> dict[str, Any]:
+ def estimate_statement_cost(
+ cls, database: Database, statement: str, cursor: Any
+ ) -> dict[str, Any]:
"""
Run a SQL query that estimates the cost of a given statement.
+ :param database: A Database object
:param statement: A single SQL statement
:param cursor: Cursor instance
:return: JSON response from Trino
@@ -945,8 +948,9 @@ def get_allow_cost_estimate(cls, extra: dict[str, Any]) -> bool:
return version is not None and Version(version) >= Version("0.319")
@classmethod
- def update_impersonation_config(
+ def update_impersonation_config( # pylint: disable=too-many-arguments
cls,
+ database: Database,
connect_args: dict[str, Any],
uri: str,
username: str | None,
@@ -955,6 +959,8 @@ def update_impersonation_config(
"""
Update a configuration dictionary
that can set the correct properties for impersonating users
+
+ :param database: the Database object
:param connect_args: config to be updated
:param uri: URI string
:param username: Effective username
diff --git a/superset/db_engine_specs/trino.py b/superset/db_engine_specs/trino.py
index 49615c39cba52..ad00557f650cc 100644
--- a/superset/db_engine_specs/trino.py
+++ b/superset/db_engine_specs/trino.py
@@ -27,11 +27,13 @@
import numpy as np
import pandas as pd
import pyarrow as pa
-from flask import ctx, current_app, Flask, g
+import requests
+from flask import copy_current_request_context, ctx, current_app, Flask, g
from sqlalchemy import text
from sqlalchemy.engine.reflection import Inspector
from sqlalchemy.engine.url import URL
from sqlalchemy.exc import NoSuchTableError
+from trino.exceptions import HttpError
from superset import db
from superset.constants import QUERY_CANCEL_KEY, QUERY_EARLY_CANCEL_KEY, USER_AGENT
@@ -60,11 +62,28 @@
logger = logging.getLogger(__name__)
+class CustomTrinoAuthErrorMeta(type):
+ def __instancecheck__(cls, instance: object) -> bool:
+ logger.info("is this being called?")
+ return isinstance(
+ instance, HttpError
+ ) and "error 401: b'Invalid credentials'" in str(instance)
+
+
+class TrinoAuthError(HttpError, metaclass=CustomTrinoAuthErrorMeta):
+ pass
+
+
class TrinoEngineSpec(PrestoBaseEngineSpec):
engine = "trino"
engine_name = "Trino"
allows_alias_to_source_column = False
+ # OAuth 2.0
+ supports_oauth2 = True
+ oauth2_exception = TrinoAuthError
+ oauth2_token_request_type = "data"
+
@classmethod
def get_extra_table_metadata(
cls,
@@ -116,8 +135,9 @@ def get_extra_table_metadata(
return metadata
@classmethod
- def update_impersonation_config(
+ def update_impersonation_config( # pylint: disable=too-many-arguments
cls,
+ database: Database,
connect_args: dict[str, Any],
uri: str,
username: str | None,
@@ -126,6 +146,7 @@ def update_impersonation_config(
"""
Update a configuration dictionary
that can set the correct properties for impersonating users
+ :param database: the Database object
:param connect_args: config to be updated
:param uri: URI string
:param username: Effective username
@@ -140,6 +161,10 @@ def update_impersonation_config(
# Set principal_username=$effective_username
if backend_name == "trino" and username is not None:
connect_args["user"] = username
+ if access_token is not None:
+ http_session = requests.Session()
+ http_session.headers.update({"Authorization": f"Bearer {access_token}"})
+ connect_args["http_session"] = http_session
@classmethod
def get_url_for_impersonation(
@@ -152,6 +177,7 @@ def get_url_for_impersonation(
"""
Return a modified URL with the username set.
+ :param access_token: Personal access token for OAuth2
:param url: SQLAlchemy URL object
:param impersonate_user: Flag indicating if impersonation is enabled
:param username: Effective username
@@ -226,6 +252,7 @@ def execute_with_cursor(
execute_result: dict[str, Any] = {}
execute_event = threading.Event()
+ @copy_current_request_context
def _execute(
results: dict[str, Any],
event: threading.Event,
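Two of the trino.py changes above are easy to miss in diff form: the `__instancecheck__` metaclass makes `isinstance` match on the exception's message rather than its class, and impersonation now mounts a bearer-token `requests.Session` on `connect_args`. A standalone sketch of both ideas, with illustrative names (`AuthErrorMeta`, `build_connect_args`) that are not part of the PR:

import requests

class AuthErrorMeta(type):
    # isinstance(exc, AuthError) consults this hook, so membership is
    # decided by message contents rather than by class hierarchy.
    def __instancecheck__(cls, instance: object) -> bool:
        return "error 401" in str(instance)

class AuthError(Exception, metaclass=AuthErrorMeta):
    pass

def build_connect_args(access_token: str | None) -> dict:
    # Mirrors the impersonation hook above: a session pre-loaded with the
    # bearer token rides along in connect_args["http_session"].
    connect_args: dict = {}
    if access_token is not None:
        http_session = requests.Session()
        http_session.headers.update({"Authorization": f"Bearer {access_token}"})
        connect_args["http_session"] = http_session
    return connect_args

assert isinstance(RuntimeError("error 401: b'Invalid credentials'"), AuthError)
assert not isinstance(ValueError("error 500"), AuthError)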
diff --git a/superset/migrations/shared/catalogs.py b/superset/migrations/shared/catalogs.py
index b75214291b0d9..3f14598eb6e82 100644
--- a/superset/migrations/shared/catalogs.py
+++ b/superset/migrations/shared/catalogs.py
@@ -381,7 +381,17 @@ def upgrade_catalog_perms(engines: set[str] | None = None) -> None:
# analytical DB. If we can't connect to the analytical DB during the migration
# we should stop it, since we need the default catalog in order to update
# existing models.
- if default_catalog := database.get_default_catalog():
+ try:
+ default_catalog = database.get_default_catalog()
+ except GenericDBException as ex:
+ logger.warning(
+ "Error fetching default catalog for database %s: %s",
+ database.database_name,
+ ex,
+ )
+ continue
+
+ if default_catalog:
upgrade_database_catalogs(database, default_catalog, session)
session.flush()
@@ -558,7 +568,17 @@ def downgrade_catalog_perms(engines: set[str] | None = None) -> None:
) or not db_engine_spec.supports_catalog:
continue
- if default_catalog := database.get_default_catalog():
+ try:
+ default_catalog = database.get_default_catalog()
+ except GenericDBException as ex:
+ logger.warning(
+ "Error fetching default catalog for database %s: %s",
+ database.database_name,
+ ex,
+ )
+ continue
+
+ if default_catalog:
downgrade_database_catalogs(database, default_catalog, session)
session.flush()
diff --git a/superset/models/core.py b/superset/models/core.py
index 5d3a6ea74ddab..4181412727a27 100755
--- a/superset/models/core.py
+++ b/superset/models/core.py
@@ -29,6 +29,7 @@
from copy import deepcopy
from datetime import datetime
from functools import lru_cache
+from inspect import signature
from typing import Any, Callable, cast, TYPE_CHECKING
import numpy
@@ -510,12 +511,14 @@ def _get_sqla_engine( # pylint: disable=too-many-locals
logger.debug("Database._get_sqla_engine(). Masked URL: %s", str(masked_url))
if self.impersonate_user:
- self.db_engine_spec.update_impersonation_config(
- connect_args,
- str(sqlalchemy_url),
- effective_username,
- access_token,
+ # PR #30674 changed the signature of the method to include database.
+ # This ensures that the change is backwards compatible
+ args = [connect_args, str(sqlalchemy_url), effective_username, access_token]
+ args = self.add_database_to_signature(
+ self.db_engine_spec.update_impersonation_config,
+ args,
)
+ self.db_engine_spec.update_impersonation_config(*args)
if connect_args:
params["connect_args"] = connect_args
@@ -543,6 +546,24 @@ def _get_sqla_engine( # pylint: disable=too-many-locals
except Exception as ex:
raise self.db_engine_spec.get_dbapi_mapped_exception(ex) from ex
+ def add_database_to_signature(
+ self,
+ func: Callable[..., None],
+ args: list[Any],
+ ) -> list[Any]:
+ """
+ Examines a function signature looking for a database param.
+ If the signature requires a database, the function appends self in the
+ proper position.
+ """
+
+ # PR #30674 changed the signature of the method to include database.
+ # This ensures that the change is backwards compatible
+ sig = signature(func)
+ if "database" in (params := sig.parameters.keys()):
+ args.insert(list(params).index("database"), self)
+ return args
+
@contextmanager
def get_raw_connection(
self,
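A self-contained sketch of the signature-sniffing shim above, with stand-in spec functions showing both the old and new calling conventions (the names are illustrative):

from inspect import signature
from typing import Any, Callable

def add_database_to_args(
    func: Callable[..., None], database: Any, args: list[Any]
) -> list[Any]:
    # Insert `database` at the position the callee declares it, if at all.
    params = list(signature(func).parameters)
    if "database" in params:
        args.insert(params.index("database"), database)
    return args

# Old-style spec: no database parameter; args pass through untouched.
def old_spec(connect_args: dict, uri: str) -> None: ...

# New-style spec (this PR): database comes first.
def new_spec(database: Any, connect_args: dict, uri: str) -> None: ...

assert add_database_to_args(old_spec, "db", ["ca", "uri"]) == ["ca", "uri"]
assert add_database_to_args(new_spec, "db", ["ca", "uri"]) == ["db", "ca", "uri"]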
diff --git a/superset/models/sql_types/mssql_sql_types.py b/superset/models/sql_types/mssql_sql_types.py
new file mode 100644
index 0000000000000..add40e31006ad
--- /dev/null
+++ b/superset/models/sql_types/mssql_sql_types.py
@@ -0,0 +1,61 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# pylint: disable=abstract-method
+import uuid
+from typing import Any, Optional
+
+from sqlalchemy.engine.interfaces import Dialect
+from sqlalchemy.sql.sqltypes import CHAR
+from sqlalchemy.sql.visitors import Visitable
+from sqlalchemy.types import TypeDecorator
+
+# _compiler_dispatch is defined to help with type compilation
+
+
+class GUID(TypeDecorator):
+ """
+ A type for SQL Server's uniqueidentifier, stored as stringified UUIDs.
+ """
+
+ impl = CHAR
+
+ @property
+ def python_type(self) -> type[uuid.UUID]:
+ """The Python type for this SQL type is `uuid.UUID`."""
+ return uuid.UUID
+
+ @classmethod
+ def _compiler_dispatch(cls, _visitor: Visitable, **_kw: Any) -> str:
+ """Return the SQL type for the GUID type, which is CHAR(36) in SQL Server."""
+ return "CHAR(36)"
+
+ def process_bind_param(self, value: Any, dialect: Dialect) -> Optional[str]:
+ """Prepare the UUID value for binding to the database."""
+ if value is None:
+ return None
+ if not isinstance(value, uuid.UUID):
+ return str(uuid.UUID(value)) # Convert to string UUID if needed
+ return str(value)
+
+ def process_result_value(
+ self, value: Optional[str], dialect: Dialect
+ ) -> Optional[uuid.UUID]:
+ """Convert the string back to a UUID when retrieving from the database."""
+ if value is None:
+ return None
+ return uuid.UUID(value)
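A hedged usage sketch for the new `GUID` type; the `Widget` model and the SQLite URL are illustrative, but the round-trip through `process_bind_param`/`process_result_value` is what the type above implements:

import uuid

from sqlalchemy import Column, create_engine, Integer
from sqlalchemy.orm import declarative_base, Session

from superset.models.sql_types.mssql_sql_types import GUID

Base = declarative_base()

class Widget(Base):  # hypothetical model, for illustration only
    __tablename__ = "widget"
    id = Column(Integer, primary_key=True)
    guid = Column(GUID(), default=uuid.uuid4)

engine = create_engine("sqlite://")  # any dialect; GUID is stored as CHAR(36)
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(Widget())
    session.commit()
    widget = session.query(Widget).one()
    assert isinstance(widget.guid, uuid.UUID)  # round-trips back to uuid.UUID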
diff --git a/superset/reports/notifications/email.py b/superset/reports/notifications/email.py
index f5943f53927da..b4514d43aa815 100644
--- a/superset/reports/notifications/email.py
+++ b/superset/reports/notifications/email.py
@@ -84,13 +84,17 @@ class EmailNotification(BaseNotification): # pylint: disable=too-few-public-met
def _get_smtp_domain() -> str:
return parseaddr(app.config["SMTP_MAIL_FROM"])[1].split("@")[1]
- @staticmethod
- def _error_template(text: str) -> str:
+ def _error_template(self, text: str) -> str:
+ call_to_action = self._get_call_to_action()
return __(
"""
- Error: %(text)s
+ Your report/alert was unable to be generated because of the following error: %(text)s
+ Please check your dashboard/chart for errors.
+ <a href="%(url)s">%(call_to_action)s</a>
""",
text=text,
+ url=self._content.url,
+ call_to_action=call_to_action,
)
def _get_content(self) -> EmailContent:
@@ -130,7 +134,6 @@ def _get_content(self) -> EmailContent:
else:
html_table = ""
- call_to_action = __(app.config["EMAIL_REPORTS_CTA"])
img_tags = []
for msgid in images.keys():
img_tags.append(
@@ -140,6 +143,7 @@ def _get_content(self) -> EmailContent:
"""
)
img_tag = "".join(img_tags)
+ call_to_action = self._get_call_to_action()
body = textwrap.dedent(
f"""
@@ -190,6 +194,9 @@ def _get_subject(self) -> str:
title=self._content.name,
)
+ def _get_call_to_action(self) -> str:
+ return __(app.config["EMAIL_REPORTS_CTA"])
+
def _get_to(self) -> str:
return json.loads(self._recipient.recipient_config_json)["target"]
diff --git a/superset/superset_typing.py b/superset/superset_typing.py
index 3a850e0acb672..c3c40cd31a918 100644
--- a/superset/superset_typing.py
+++ b/superset/superset_typing.py
@@ -149,6 +149,10 @@ class OAuth2ClientConfig(TypedDict):
# expired access token.
token_request_uri: str
+ # Not all identity providers expect JSON. Keycloak expects a form-encoded request,
+ # which in the `requests` package context means using the `data` param, not `json`.
+ request_content_type: str
+
class OAuth2TokenResponse(TypedDict, total=False):
"""
diff --git a/superset/utils/oauth2.py b/superset/utils/oauth2.py
index b889ef83c5e75..95db2921f6cd6 100644
--- a/superset/utils/oauth2.py
+++ b/superset/utils/oauth2.py
@@ -23,7 +23,7 @@
import backoff
import jwt
from flask import current_app, url_for
-from marshmallow import EXCLUDE, fields, post_load, Schema
+from marshmallow import EXCLUDE, fields, post_load, Schema, validate
from superset import db
from superset.distributed_lock import KeyValueDistributedLock
@@ -192,3 +192,8 @@ class OAuth2ClientConfigSchema(Schema):
)
authorization_request_uri = fields.String(required=True)
token_request_uri = fields.String(required=True)
+ request_content_type = fields.String(
+ required=False,
+ load_default=lambda: "json",
+ validate=validate.OneOf(["json", "data"]),
+ )
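How the new `request_content_type` field would steer the token request: a minimal sketch (the helper name is an assumption; the actual call site lives elsewhere in the OAuth2 flow):

import requests

def request_token(token_uri: str, payload: dict, request_content_type: str = "json"):
    # "json" sends an application/json body; "data" sends a form-encoded
    # body, which providers like Keycloak expect for token requests.
    if request_content_type == "data":
        return requests.post(token_uri, data=payload, timeout=30)
    return requests.post(token_uri, json=payload, timeout=30)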
diff --git a/superset/utils/pandas_postprocessing/compare.py b/superset/utils/pandas_postprocessing/compare.py
index b20682027f4a1..64442280b2af2 100644
--- a/superset/utils/pandas_postprocessing/compare.py
+++ b/superset/utils/pandas_postprocessing/compare.py
@@ -81,5 +81,10 @@ def compare( # pylint: disable=too-many-arguments
df = pd.concat([df, diff_df], axis=1)
if drop_original_columns:
- df = df.drop(source_columns + compare_columns, axis=1)
+ level = (
+ 0
+ if isinstance(df.columns, pd.MultiIndex) and df.columns.nlevels > 1
+ else None
+ )
+ df = df.drop(source_columns + compare_columns, axis=1, level=level)
return df
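Why the `level` guard matters: with MultiIndex columns, `drop` needs an explicit level to match the bare source-column names, while on a flat index passing a level errors out. A small demonstration:

import pandas as pd

df = pd.DataFrame(
    [[1, 2], [3, 4]],
    columns=pd.MultiIndex.from_tuples([("count", "a"), ("count", "b")]),
)
# A bare label like "count" only matches when the lookup is pinned to the
# top level of the MultiIndex; on a flat index, level must stay None.
dropped = df.drop(["count"], axis=1, level=0)
assert dropped.empty  # both ("count", "a") and ("count", "b") are gone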
diff --git a/superset/utils/screenshots.py b/superset/utils/screenshots.py
index b31d9c0e4412b..96c0f40d6da51 100644
--- a/superset/utils/screenshots.py
+++ b/superset/utils/screenshots.py
@@ -24,6 +24,7 @@
from superset import feature_flag_manager
from superset.dashboards.permalink.types import DashboardPermalinkState
+from superset.extensions import event_logger
from superset.utils.hashing import md5_sha_from_dict
from superset.utils.urls import modify_url_query
from superset.utils.webdriver import (
@@ -91,7 +92,8 @@ def get_screenshot(
self, user: User, window_size: WindowSize | None = None
) -> bytes | None:
driver = self.driver(window_size)
- self.screenshot = driver.get_screenshot(self.url, self.element, user)
+ with event_logger.log_context("screenshot", screenshot_url=self.url):
+ self.screenshot = driver.get_screenshot(self.url, self.element, user)
return self.screenshot
def get(
@@ -169,7 +171,10 @@ def compute_and_cache( # pylint: disable=too-many-arguments
# Assuming all sorts of things can go wrong with Selenium
try:
- payload = self.get_screenshot(user=user, window_size=window_size)
+ with event_logger.log_context(
+ f"screenshot.compute.{self.thumbnail_type}", force=force
+ ):
+ payload = self.get_screenshot(user=user, window_size=window_size)
except Exception as ex: # pylint: disable=broad-except
logger.warning("Failed at generating thumbnail %s", ex, exc_info=True)
@@ -182,7 +187,10 @@ def compute_and_cache( # pylint: disable=too-many-arguments
if payload:
logger.info("Caching thumbnail: %s", cache_key)
- cache.set(cache_key, payload)
+ with event_logger.log_context(
+ f"screenshot.cache.{self.thumbnail_type}", force=force
+ ):
+ cache.set(cache_key, payload)
logger.info("Done caching thumbnail")
return payload
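`event_logger.log_context` is Superset's own context manager; as a rough mental model (a stand-in sketch, not the real implementation), it brackets the wrapped block and emits one timed record per action:

import logging
import time
from contextlib import contextmanager

logger = logging.getLogger(__name__)

@contextmanager
def log_context(action: str, **payload):
    # Time the wrapped block and emit one record per action, carrying any
    # extra payload (e.g. screenshot_url or force) along with the duration.
    start = time.monotonic()
    try:
        yield
    finally:
        duration_ms = (time.monotonic() - start) * 1000
        logger.info("%s took %.1f ms (payload=%s)", action, duration_ms, payload)

with log_context("screenshot", screenshot_url="/dashboard/1/"):
    time.sleep(0.01)  # stand-in for driver.get_screenshot(...)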
diff --git a/superset/views/sql_lab/views.py b/superset/views/sql_lab/views.py
index 29d0fdba758e9..29cf640d8c034 100644
--- a/superset/views/sql_lab/views.py
+++ b/superset/views/sql_lab/views.py
@@ -29,11 +29,12 @@
from superset.models.sql_lab import Query, SavedQuery, TableSchema, TabState
from superset.superset_typing import FlaskResponse
from superset.utils import json
-from superset.utils.core import get_user_id
+from superset.utils.core import error_msg_from_exception, get_user_id
from superset.views.base import (
BaseSupersetView,
DeleteMixin,
DeprecateModelViewMixin,
+ json_error_response,
json_success,
SupersetModelView,
)
@@ -84,48 +85,56 @@ class TabStateView(BaseSupersetView):
@has_access_api
@expose("/", methods=("POST",))
def post(self) -> FlaskResponse:
- query_editor = json.loads(request.form["queryEditor"])
- tab_state = TabState(
- user_id=get_user_id(),
- # This is for backward compatibility
- label=query_editor.get("name")
- or query_editor.get("title", __("Untitled Query")),
- active=True,
- database_id=query_editor["dbId"],
- catalog=query_editor.get("catalog"),
- schema=query_editor.get("schema"),
- sql=query_editor.get("sql", "SELECT ..."),
- query_limit=query_editor.get("queryLimit"),
- hide_left_bar=query_editor.get("hideLeftBar"),
- saved_query_id=query_editor.get("remoteId"),
- template_params=query_editor.get("templateParams"),
- )
- (
- db.session.query(TabState)
- .filter_by(user_id=get_user_id())
- .update({"active": False})
- )
- db.session.add(tab_state)
- db.session.commit()
- return json_success(json.dumps({"id": tab_state.id}))
+ try:
+ query_editor = json.loads(request.form["queryEditor"])
+ tab_state = TabState(
+ user_id=get_user_id(),
+ # This is for backward compatibility
+ label=query_editor.get("name")
+ or query_editor.get("title", __("Untitled Query")),
+ active=True,
+ database_id=query_editor["dbId"],
+ catalog=query_editor.get("catalog"),
+ schema=query_editor.get("schema"),
+ sql=query_editor.get("sql", "SELECT ..."),
+ query_limit=query_editor.get("queryLimit"),
+ hide_left_bar=query_editor.get("hideLeftBar"),
+ saved_query_id=query_editor.get("remoteId"),
+ template_params=query_editor.get("templateParams"),
+ )
+ (
+ db.session.query(TabState)
+ .filter_by(user_id=get_user_id())
+ .update({"active": False})
+ )
+ db.session.add(tab_state)
+ db.session.commit()
+ return json_success(json.dumps({"id": tab_state.id}))
+ except Exception as ex: # pylint: disable=broad-except
+ db.session.rollback()
+ return json_error_response(error_msg_from_exception(ex), 400)
@has_access_api
@expose("/", methods=("DELETE",))
def delete(self, tab_state_id: int) -> FlaskResponse:
- owner_id = _get_owner_id(tab_state_id)
- if owner_id is None:
- return Response(status=404)
- if owner_id != get_user_id():
- return Response(status=403)
-
- db.session.query(TabState).filter(TabState.id == tab_state_id).delete(
- synchronize_session=False
- )
- db.session.query(TableSchema).filter(
- TableSchema.tab_state_id == tab_state_id
- ).delete(synchronize_session=False)
- db.session.commit()
- return json_success(json.dumps("OK"))
+ try:
+ owner_id = _get_owner_id(tab_state_id)
+ if owner_id is None:
+ return Response(status=404)
+ if owner_id != get_user_id():
+ return Response(status=403)
+
+ db.session.query(TabState).filter(TabState.id == tab_state_id).delete(
+ synchronize_session=False
+ )
+ db.session.query(TableSchema).filter(
+ TableSchema.tab_state_id == tab_state_id
+ ).delete(synchronize_session=False)
+ db.session.commit()
+ return json_success(json.dumps("OK"))
+ except Exception as ex: # pylint: disable=broad-except
+ db.session.rollback()
+ return json_error_response(error_msg_from_exception(ex), 400)
@has_access_api
@expose("/", methods=("GET",))
@@ -146,19 +155,23 @@ def get(self, tab_state_id: int) -> FlaskResponse:
@has_access_api
@expose("/activate", methods=("POST",))
def activate(self, tab_state_id: int) -> FlaskResponse:
- owner_id = _get_owner_id(tab_state_id)
- if owner_id is None:
- return Response(status=404)
- if owner_id != get_user_id():
- return Response(status=403)
-
- (
- db.session.query(TabState)
- .filter_by(user_id=get_user_id())
- .update({"active": TabState.id == tab_state_id})
- )
- db.session.commit()
- return json_success(json.dumps(tab_state_id))
+ try:
+ owner_id = _get_owner_id(tab_state_id)
+ if owner_id is None:
+ return Response(status=404)
+ if owner_id != get_user_id():
+ return Response(status=403)
+
+ (
+ db.session.query(TabState)
+ .filter_by(user_id=get_user_id())
+ .update({"active": TabState.id == tab_state_id})
+ )
+ db.session.commit()
+ return json_success(json.dumps(tab_state_id))
+ except Exception as ex: # pylint: disable=broad-except
+ db.session.rollback()
+ return json_error_response(error_msg_from_exception(ex), 400)
@has_access_api
@expose("", methods=("PUT",))
@@ -169,102 +182,118 @@ def put(self, tab_state_id: int) -> FlaskResponse:
if owner_id != get_user_id():
return Response(status=403)
- fields = {k: json.loads(v) for k, v in request.form.to_dict().items()}
- if client_id := fields.get("latest_query_id"):
- query = db.session.query(Query).filter_by(client_id=client_id).one_or_none()
- if not query:
- return self.json_response({"error": "Bad request"}, status=400)
- db.session.query(TabState).filter_by(id=tab_state_id).update(fields)
- db.session.commit()
- return json_success(json.dumps(tab_state_id))
+ try:
+ fields = {k: json.loads(v) for k, v in request.form.to_dict().items()}
+ db.session.query(TabState).filter_by(id=tab_state_id).update(fields)
+ db.session.commit()
+ return json_success(json.dumps(tab_state_id))
+ except Exception as ex: # pylint: disable=broad-except
+ db.session.rollback()
+ return json_error_response(error_msg_from_exception(ex), 400)
@has_access_api
@expose("/migrate_query", methods=("POST",))
def migrate_query(self, tab_state_id: int) -> FlaskResponse:
- owner_id = _get_owner_id(tab_state_id)
- if owner_id is None:
- return Response(status=404)
- if owner_id != get_user_id():
- return Response(status=403)
-
- client_id = json.loads(request.form["queryId"])
- db.session.query(Query).filter_by(client_id=client_id).update(
- {"sql_editor_id": tab_state_id}
- )
- db.session.commit()
- return json_success(json.dumps(tab_state_id))
+ try:
+ owner_id = _get_owner_id(tab_state_id)
+ if owner_id is None:
+ return Response(status=404)
+ if owner_id != get_user_id():
+ return Response(status=403)
+
+ client_id = json.loads(request.form["queryId"])
+ db.session.query(Query).filter_by(client_id=client_id).update(
+ {"sql_editor_id": tab_state_id}
+ )
+ db.session.commit()
+ return json_success(json.dumps(tab_state_id))
+ except Exception as ex: # pylint: disable=broad-except
+ db.session.rollback()
+ return json_error_response(error_msg_from_exception(ex), 400)
@has_access_api
@expose("/query/", methods=("DELETE",))
def delete_query(self, tab_state_id: int, client_id: str) -> FlaskResponse:
- # Before deleting the query, ensure it's not tied to any
- # active tab as the last query. If so, replace the query
- # with the latest one created in that tab
- tab_state_query = db.session.query(TabState).filter_by(
- id=tab_state_id, latest_query_id=client_id
- )
- if tab_state_query.count():
- query = (
- db.session.query(Query)
- .filter(
- and_(
- Query.client_id != client_id,
- Query.user_id == get_user_id(),
- Query.sql_editor_id == str(tab_state_id),
- ),
- )
- .order_by(Query.id.desc())
- .first()
- )
- tab_state_query.update(
- {"latest_query_id": query.client_id if query else None}
+ try:
+ # Before deleting the query, ensure it's not tied to any
+ # active tab as the last query. If so, replace the query
+ # with the latest one created in that tab
+ tab_state_query = db.session.query(TabState).filter_by(
+ id=tab_state_id, latest_query_id=client_id
)
+ if tab_state_query.count():
+ query = (
+ db.session.query(Query)
+ .filter(
+ and_(
+ Query.client_id != client_id,
+ Query.user_id == get_user_id(),
+ Query.sql_editor_id == str(tab_state_id),
+ ),
+ )
+ .order_by(Query.id.desc())
+ .first()
+ )
+ tab_state_query.update(
+ {"latest_query_id": query.client_id if query else None}
+ )
- db.session.query(Query).filter_by(
- client_id=client_id,
- user_id=get_user_id(),
- sql_editor_id=str(tab_state_id),
- ).delete(synchronize_session=False)
- db.session.commit()
- return json_success(json.dumps("OK"))
+ db.session.query(Query).filter_by(
+ client_id=client_id,
+ user_id=get_user_id(),
+ sql_editor_id=str(tab_state_id),
+ ).delete(synchronize_session=False)
+ db.session.commit()
+ return json_success(json.dumps("OK"))
+ except Exception as ex: # pylint: disable=broad-except
+ db.session.rollback()
+ return json_error_response(error_msg_from_exception(ex), 400)
class TableSchemaView(BaseSupersetView):
@has_access_api
@expose("/", methods=("POST",))
def post(self) -> FlaskResponse:
- table = json.loads(request.form["table"])
-
- # delete any existing table schema
- db.session.query(TableSchema).filter(
- TableSchema.tab_state_id == table["queryEditorId"],
- TableSchema.database_id == table["dbId"],
- TableSchema.catalog == table.get("catalog"),
- TableSchema.schema == table["schema"],
- TableSchema.table == table["name"],
- ).delete(synchronize_session=False)
-
- table_schema = TableSchema(
- tab_state_id=table["queryEditorId"],
- database_id=table["dbId"],
- catalog=table.get("catalog"),
- schema=table["schema"],
- table=table["name"],
- description=json.dumps(table),
- expanded=True,
- )
- db.session.add(table_schema)
- db.session.commit()
- return json_success(json.dumps({"id": table_schema.id}))
+ try:
+ table = json.loads(request.form["table"])
+
+ # delete any existing table schema
+ db.session.query(TableSchema).filter(
+ TableSchema.tab_state_id == table["queryEditorId"],
+ TableSchema.database_id == table["dbId"],
+ TableSchema.catalog == table.get("catalog"),
+ TableSchema.schema == table["schema"],
+ TableSchema.table == table["name"],
+ ).delete(synchronize_session=False)
+
+ table_schema = TableSchema(
+ tab_state_id=table["queryEditorId"],
+ database_id=table["dbId"],
+ catalog=table.get("catalog"),
+ schema=table["schema"],
+ table=table["name"],
+ description=json.dumps(table),
+ expanded=True,
+ )
+ db.session.add(table_schema)
+ db.session.commit()
+ return json_success(json.dumps({"id": table_schema.id}))
+ except Exception as ex: # pylint: disable=broad-except
+ db.session.rollback()
+ return json_error_response(error_msg_from_exception(ex), 400)
@has_access_api
@expose("/", methods=("DELETE",))
def delete(self, table_schema_id: int) -> FlaskResponse:
- db.session.query(TableSchema).filter(TableSchema.id == table_schema_id).delete(
- synchronize_session=False
- )
- db.session.commit()
- return json_success(json.dumps("OK"))
+ try:
+ db.session.query(TableSchema).filter(
+ TableSchema.id == table_schema_id
+ ).delete(synchronize_session=False)
+ db.session.commit()
+ return json_success(json.dumps("OK"))
+ except Exception as ex: # pylint: disable=broad-except
+ db.session.rollback()
+ return json_error_response(error_msg_from_exception(ex), 400)
@has_access_api
@expose("//expanded", methods=("POST",))
diff --git a/tests/integration_tests/charts/api_tests.py b/tests/integration_tests/charts/api_tests.py
index a99ba04f78427..784c4651ada29 100644
--- a/tests/integration_tests/charts/api_tests.py
+++ b/tests/integration_tests/charts/api_tests.py
@@ -24,6 +24,7 @@
import prison
import pytest
import yaml
+from flask import g
from flask_babel import lazy_gettext as _
from parameterized import parameterized
from sqlalchemy import and_
@@ -62,6 +63,7 @@
dataset_config,
dataset_metadata_config,
)
+from tests.integration_tests.fixtures.query_context import get_query_context
from tests.integration_tests.fixtures.tags import (
create_custom_tags, # noqa: F401
get_filter_params,
@@ -2327,3 +2329,57 @@ def test_update_chart_no_tag_changes(self):
security_manager.add_permission_role(alpha_role, write_tags_perm)
security_manager.add_permission_role(alpha_role, tag_charts_perm)
+
+ @patch("superset.security.manager.SupersetSecurityManager.has_guest_access")
+ @patch("superset.security.manager.SupersetSecurityManager.is_guest_user")
+ @pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
+ def test_get_chart_data_as_guest_user(
+ self, is_guest_user, has_guest_access
+ ):
+ """
+ Chart API: Test that chart data responses for guest users exclude the
+ executed query
+ """
+ self.login(ADMIN_USERNAME)
+ g.user.rls = []
+ is_guest_user.return_value = True
+ has_guest_access.return_value = True
+
+ with mock.patch.object(Slice, "get_query_context") as mock_get_query_context:
+ mock_get_query_context.return_value = get_query_context("birth_names")
+ rv = self.client.post(
+ "api/v1/chart/data", # noqa: F541
+ json={
+ "datasource": {"id": 2, "type": "table"},
+ "queries": [
+ {
+ "extras": {"where": "", "time_grain_sqla": "P1D"},
+ "columns": ["name"],
+ "metrics": [{"label": "sum__num"}],
+ "orderby": [("sum__num", False)],
+ "row_limit": 100,
+ "granularity": "ds",
+ "time_range": "100 years ago : now",
+ "timeseries_limit": 0,
+ "timeseries_limit_metric": None,
+ "order_desc": True,
+ "filters": [
+ {"col": "gender", "op": "==", "val": "boy"},
+ {"col": "num", "op": "IS NOT NULL"},
+ {
+ "col": "name",
+ "op": "NOT IN",
+ "val": ["", '"abc"'],
+ },
+ ],
+ "having": "",
+ "where": "",
+ }
+ ],
+ "result_format": "json",
+ "result_type": "full",
+ },
+ )
+ data = json.loads(rv.data.decode("utf-8"))
+ result = data["result"]
+ excluded_key = "query"
+ assert all(excluded_key not in query for query in result)
diff --git a/tests/integration_tests/core_tests.py b/tests/integration_tests/core_tests.py
index 4f989611b2a29..e100d10da613e 100644
--- a/tests/integration_tests/core_tests.py
+++ b/tests/integration_tests/core_tests.py
@@ -1013,7 +1013,6 @@ def test_tabstate_update(self):
data = {"sql": json.dumps("select 1"), "latest_query_id": json.dumps(client_id)}
response = self.client.put(f"/tabstateview/{tab_state_id}", data=data)
assert response.status_code == 400
- assert response.json["error"] == "Bad request"
# generate query
db.session.add(Query(client_id=client_id, database_id=1))
db.session.commit()
diff --git a/tests/integration_tests/dashboards/api_tests.py b/tests/integration_tests/dashboards/api_tests.py
index f88bdccc0eccf..e02b5a116a39a 100644
--- a/tests/integration_tests/dashboards/api_tests.py
+++ b/tests/integration_tests/dashboards/api_tests.py
@@ -85,6 +85,15 @@ class TestDashboardApi(ApiOwnersTestCaseMixin, InsertChartMixin, SupersetTestCas
"published": False,
}
+ dashboard_put_filters_data = {
+ "modified": [
+ {"id": "native_filter_1", "name": "Filter 1"},
+ {"id": "native_filter_2", "name": "Filter 2"},
+ ],
+ "deleted": [],
+ "reordered": [],
+ }
+
@pytest.fixture()
def create_dashboards(self):
with self.create_app().app_context():
@@ -1719,6 +1728,175 @@ def test_update_dashboard(self):
db.session.delete(model)
db.session.commit()
+ def test_add_dashboard_filters(self):
+ """
+ Dashboard API: Test that a filter was added
+ """
+ admin = self.get_user("admin")
+ admin_role = self.get_role("Admin")
+ dashboard_id = self.insert_dashboard(
+ "title1", "slug1", [admin.id], roles=[admin_role.id]
+ ).id
+ self.login(ADMIN_USERNAME)
+ uri = f"api/v1/dashboard/{dashboard_id}/filters"
+ rv = self.put_assert_metric(uri, self.dashboard_put_filters_data, "put_filters")
+ assert rv.status_code == 200
+ model = db.session.query(Dashboard).get(dashboard_id)
+ json_metadata = model.json_metadata
+ native_filter_config = json.loads(json_metadata)["native_filter_configuration"]
+
+ assert native_filter_config[0]["name"] == "Filter 1"
+ db.session.delete(model)
+ db.session.commit()
+
+ def test_modify_dashboard_filters_values(self):
+ """
+ Dashboard API: Test that filter values were modified
+ """
+ admin = self.get_user("admin")
+ admin_role = self.get_role("Admin")
+ json_metadata = {
+ "native_filter_configuration": [
+ {
+ "id": "native_filter_1",
+ "name": "Filter X",
+ "filterType": "filter_select",
+ "cascadeParentIds": [],
+ }
+ ]
+ }
+ dashboard_id = self.insert_dashboard(
+ "title1",
+ "slug1",
+ [admin.id],
+ roles=[admin_role.id],
+ json_metadata=json.dumps(json_metadata),
+ ).id
+ self.login(ADMIN_USERNAME)
+ uri = f"api/v1/dashboard/{dashboard_id}/filters"
+ rv = self.put_assert_metric(uri, self.dashboard_put_filters_data, "put_filters")
+
+ assert rv.status_code == 200
+ model = db.session.query(Dashboard).get(dashboard_id)
+ json_metadata = model.json_metadata
+ native_filter_config = json.loads(json_metadata)["native_filter_configuration"]
+
+ assert native_filter_config[0]["name"] == "Filter 1"
+
+ db.session.delete(model)
+ db.session.commit()
+
+ def test_modify_dashboard_filters_order(self):
+ """
+ Dashboard API: Test filters reordered
+ """
+ admin = self.get_user("admin")
+ admin_role = self.get_role("Admin")
+ json_metadata = {
+ "native_filter_configuration": [
+ {
+ "id": "native_filter_1",
+ "name": "Filter 1",
+ "filterType": "filter_select",
+ "cascadeParentIds": [],
+ },
+ {
+ "id": "native_filter_2",
+ "name": "Filter 2",
+ "filterType": "filter_select",
+ "cascadeParentIds": [],
+ },
+ ]
+ }
+ dashboard_id = self.insert_dashboard(
+ "title1",
+ "slug1",
+ [admin.id],
+ roles=[admin_role.id],
+ json_metadata=json.dumps(json_metadata),
+ ).id
+ self.login(ADMIN_USERNAME)
+ uri = f"api/v1/dashboard/{dashboard_id}/filters"
+ put_data = {
+ **self.dashboard_put_filters_data,
+ "reordered": ["native_filter_2", "native_filter_1"],
+ }
+ rv = self.put_assert_metric(uri, put_data, "put_filters")
+ assert rv.status_code == 200
+ model = db.session.query(Dashboard).get(dashboard_id)
+ json_metadata = model.json_metadata
+ native_filter_config = json.loads(json_metadata)["native_filter_configuration"]
+
+ assert native_filter_config[0]["name"] == "Filter 2"
+
+ db.session.delete(model)
+ db.session.commit()
+
+ def test_dashboard_filters_deleted(self):
+ """
+ Dashboard API: Test filters deleted
+ """
+ admin = self.get_user("admin")
+ admin_role = self.get_role("Admin")
+ json_metadata = {
+ "native_filter_configuration": [
+ {
+ "id": "native_filter_1",
+ "name": "Filter 1",
+ "filterType": "filter_select",
+ "cascadeParentIds": [],
+ },
+ {
+ "id": "native_filter_2",
+ "name": "Filter 2",
+ "filterType": "filter_select",
+ "cascadeParentIds": [],
+ },
+ ]
+ }
+ dashboard_id = self.insert_dashboard(
+ "title1",
+ "slug1",
+ [admin.id],
+ roles=[admin_role.id],
+ json_metadata=json.dumps(json_metadata),
+ ).id
+ self.login(ADMIN_USERNAME)
+ uri = f"api/v1/dashboard/{dashboard_id}/filters"
+ put_data = {
+ **self.dashboard_put_filters_data,
+ "deleted": ["native_filter_1"],
+ }
+ rv = self.put_assert_metric(uri, put_data, "put_filters")
+ assert rv.status_code == 200
+ model = db.session.query(Dashboard).get(dashboard_id)
+ json_metadata = model.json_metadata
+ native_filter_config = json.loads(json_metadata)["native_filter_configuration"]
+
+ assert native_filter_config[0]["name"] == "Filter 2"
+
+ db.session.delete(model)
+ db.session.commit()
+
+ def test_modify_dashboard_filters_invalid_data(self):
+ """
+ Dashboard API: Test modify filters with invalid data
+ """
+ admin = self.get_user("admin")
+ admin_role = self.get_role("Admin")
+ dashboard_id = self.insert_dashboard(
+ "title1", "slug1", [admin.id], roles=[admin_role.id]
+ ).id
+ self.login(ADMIN_USERNAME)
+ uri = f"api/v1/dashboard/{dashboard_id}/filters"
+ put_data = {"invalid_key": "invalid_value"}
+ rv = self.put_assert_metric(uri, put_data, "put_filters")
+ assert rv.status_code == 400
+
+ model = db.session.query(Dashboard).get(dashboard_id)
+ db.session.delete(model)
+ db.session.commit()
+
def test_dashboard_get_list_no_username(self):
"""
Dashboard API: Tests that no username is returned
@@ -2847,7 +3025,9 @@ def _get_screenshot(self, dashboard_id, cache_key, download_format):
return self.client.get(uri)
@pytest.mark.usefixtures("create_dashboard_with_tag")
- def test_cache_dashboard_screenshot_success(self):
+ @patch("superset.dashboards.api.is_feature_enabled")
+ def test_cache_dashboard_screenshot_success(self, is_feature_enabled):
+ is_feature_enabled.return_value = True
self.login(ADMIN_USERNAME)
dashboard = (
db.session.query(Dashboard)
@@ -2858,7 +3038,9 @@ def test_cache_dashboard_screenshot_success(self):
assert response.status_code == 202
@pytest.mark.usefixtures("create_dashboard_with_tag")
- def test_cache_dashboard_screenshot_dashboard_validation(self):
+ @patch("superset.dashboards.api.is_feature_enabled")
+ def test_cache_dashboard_screenshot_dashboard_validation(self, is_feature_enabled):
+ is_feature_enabled.return_value = True
self.login(ADMIN_USERNAME)
dashboard = (
db.session.query(Dashboard)
@@ -2874,7 +3056,9 @@ def test_cache_dashboard_screenshot_dashboard_validation(self):
response = self._cache_screenshot(dashboard.id, invalid_payload)
assert response.status_code == 400
- def test_cache_dashboard_screenshot_dashboard_not_found(self):
+ @patch("superset.dashboards.api.is_feature_enabled")
+ def test_cache_dashboard_screenshot_dashboard_not_found(self, is_feature_enabled):
+ is_feature_enabled.return_value = True
self.login(ADMIN_USERNAME)
non_existent_id = 999
response = self._cache_screenshot(non_existent_id)
@@ -2883,10 +3067,14 @@ def test_cache_dashboard_screenshot_dashboard_not_found(self):
@pytest.mark.usefixtures("create_dashboard_with_tag")
@patch("superset.dashboards.api.cache_dashboard_screenshot")
@patch("superset.dashboards.api.DashboardScreenshot.get_from_cache_key")
- def test_screenshot_success_png(self, mock_get_cache, mock_cache_task):
+ @patch("superset.dashboards.api.is_feature_enabled")
+ def test_screenshot_success_png(
+ self, is_feature_enabled, mock_get_cache, mock_cache_task
+ ):
"""
Validate screenshot returns png
"""
+ is_feature_enabled.return_value = True
self.login(ADMIN_USERNAME)
mock_cache_task.return_value = None
mock_get_cache.return_value = BytesIO(b"fake image data")
@@ -2909,12 +3097,14 @@ def test_screenshot_success_png(self, mock_get_cache, mock_cache_task):
@patch("superset.dashboards.api.cache_dashboard_screenshot")
@patch("superset.dashboards.api.build_pdf_from_screenshots")
@patch("superset.dashboards.api.DashboardScreenshot.get_from_cache_key")
+ @patch("superset.dashboards.api.is_feature_enabled")
def test_screenshot_success_pdf(
- self, mock_get_from_cache, mock_build_pdf, mock_cache_task
+ self, is_feature_enabled, mock_get_from_cache, mock_build_pdf, mock_cache_task
):
"""
Validate screenshot can return pdf.
"""
+ is_feature_enabled.return_value = True
self.login(ADMIN_USERNAME)
mock_cache_task.return_value = None
mock_get_from_cache.return_value = BytesIO(b"fake image data")
@@ -2937,7 +3127,11 @@ def test_screenshot_success_pdf(
@pytest.mark.usefixtures("create_dashboard_with_tag")
@patch("superset.dashboards.api.cache_dashboard_screenshot")
@patch("superset.dashboards.api.DashboardScreenshot.get_from_cache_key")
- def test_screenshot_not_in_cache(self, mock_get_cache, mock_cache_task):
+ @patch("superset.dashboards.api.is_feature_enabled")
+ def test_screenshot_not_in_cache(
+ self, is_feature_enabled, mock_get_cache, mock_cache_task
+ ):
+ is_feature_enabled.return_value = True
self.login(ADMIN_USERNAME)
mock_cache_task.return_value = None
mock_get_cache.return_value = None
@@ -2954,7 +3148,9 @@ def test_screenshot_not_in_cache(self, mock_get_cache, mock_cache_task):
response = self._get_screenshot(dashboard.id, cache_key, "pdf")
assert response.status_code == 404
- def test_screenshot_dashboard_not_found(self):
+ @patch("superset.dashboards.api.is_feature_enabled")
+ def test_screenshot_dashboard_not_found(self, is_feature_enabled):
+ is_feature_enabled.return_value = True
self.login(ADMIN_USERNAME)
non_existent_id = 999
response = self._get_screenshot(non_existent_id, "some_cache_key", "png")
@@ -2963,7 +3159,11 @@ def test_screenshot_dashboard_not_found(self):
@pytest.mark.usefixtures("create_dashboard_with_tag")
@patch("superset.dashboards.api.cache_dashboard_screenshot")
@patch("superset.dashboards.api.DashboardScreenshot.get_from_cache_key")
- def test_screenshot_invalid_download_format(self, mock_get_cache, mock_cache_task):
+ @patch("superset.dashboards.api.is_feature_enabled")
+ def test_screenshot_invalid_download_format(
+ self, is_feature_enabled, mock_get_cache, mock_cache_task
+ ):
+ is_feature_enabled.return_value = True
self.login(ADMIN_USERNAME)
mock_cache_task.return_value = None
mock_get_cache.return_value = BytesIO(b"fake png data")
@@ -2980,3 +3180,20 @@ def test_screenshot_invalid_download_format(self, mock_get_cache, mock_cache_tas
response = self._get_screenshot(dashboard.id, cache_key, "invalid")
assert response.status_code == 404
+
+ @pytest.mark.usefixtures("create_dashboard_with_tag")
+ @patch("superset.dashboards.api.is_feature_enabled")
+ def test_cache_dashboard_screenshot_feature_disabled(self, is_feature_enabled):
+ is_feature_enabled.return_value = False
+ self.login(ADMIN_USERNAME)
+
+ dashboard = (
+ db.session.query(Dashboard)
+ .filter(Dashboard.dashboard_title == "dash with tag")
+ .first()
+ )
+
+ assert dashboard is not None
+
+ response = self._cache_screenshot(dashboard.id)
+ assert response.status_code == 404
diff --git a/tests/integration_tests/datasets/api_tests.py b/tests/integration_tests/datasets/api_tests.py
index 49110277bf328..b04d4cec73692 100644
--- a/tests/integration_tests/datasets/api_tests.py
+++ b/tests/integration_tests/datasets/api_tests.py
@@ -410,6 +410,145 @@ def test_get_dataset_item(self):
assert len(response["result"]["columns"]) == 3
assert len(response["result"]["metrics"]) == 2
+ def test_get_dataset_render_jinja(self):
+ """
+ Dataset API: Test get dataset with the render parameter.
+ """
+ database = get_example_database()
+ dataset = SqlaTable(
+ table_name="test_sql_table_with_jinja",
+ database=database,
+ schema=get_example_default_schema(),
+ main_dttm_col="default_dttm",
+ columns=[
+ TableColumn(
+ column_name="my_user_id",
+ type="INTEGER",
+ is_dttm=False,
+ ),
+ TableColumn(
+ column_name="calculated_test",
+ type="VARCHAR(255)",
+ is_dttm=False,
+ expression="'{{ current_username() }}'",
+ ),
+ ],
+ metrics=[
+ SqlMetric(
+ metric_name="param_test",
+ expression="{{ url_param('multiplier') }} * 1.4",
+ )
+ ],
+ sql="SELECT {{ current_user_id() }} as my_user_id",
+ )
+ db.session.add(dataset)
+ db.session.commit()
+
+ self.login(ADMIN_USERNAME)
+ admin = self.get_user(ADMIN_USERNAME)
+ uri = (
+ f"api/v1/dataset/{dataset.id}?"
+ "q=(columns:!(id,sql,columns.column_name,columns.expression,metrics.metric_name,metrics.expression))"
+ "&include_rendered_sql=true&multiplier=4"
+ )
+ rv = self.get_assert_metric(uri, "get")
+ assert rv.status_code == 200
+ response = json.loads(rv.data.decode("utf-8"))
+
+ assert response["result"] == {
+ "id": dataset.id,
+ "sql": "SELECT {{ current_user_id() }} as my_user_id",
+ "rendered_sql": f"SELECT {admin.id} as my_user_id",
+ "columns": [
+ {
+ "column_name": "my_user_id",
+ "expression": None,
+ },
+ {
+ "column_name": "calculated_test",
+ "expression": "'{{ current_username() }}'",
+ "rendered_expression": f"'{admin.username}'",
+ },
+ ],
+ "metrics": [
+ {
+ "metric_name": "param_test",
+ "expression": "{{ url_param('multiplier') }} * 1.4",
+ "rendered_expression": "4 * 1.4",
+ },
+ ],
+ }
+
+ db.session.delete(dataset)
+ db.session.commit()
+
+ def test_get_dataset_render_jinja_exceptions(self):
+ """
+ Dataset API: Test get dataset with the render parameter
+ when rendering raises an exception.
+ """
+ database = get_example_database()
+ dataset = SqlaTable(
+ table_name="test_sql_table_with_incorrect_jinja",
+ database=database,
+ schema=get_example_default_schema(),
+ main_dttm_col="default_dttm",
+ columns=[
+ TableColumn(
+ column_name="my_user_id",
+ type="INTEGER",
+ is_dttm=False,
+ ),
+ TableColumn(
+ column_name="calculated_test",
+ type="VARCHAR(255)",
+ is_dttm=False,
+ expression="'{{ current_username() }'",
+ ),
+ ],
+ metrics=[
+ SqlMetric(
+ metric_name="param_test",
+ expression="{{ url_param('multiplier') } * 1.4",
+ )
+ ],
+ sql="SELECT {{ current_user_id() } as my_user_id",
+ )
+ db.session.add(dataset)
+ db.session.commit()
+
+ self.login(ADMIN_USERNAME)
+
+ uri = f"api/v1/dataset/{dataset.id}?q=(columns:!(id,sql))&include_rendered_sql=true"
+ rv = self.get_assert_metric(uri, "get")
+ assert rv.status_code == 400
+ response = json.loads(rv.data.decode("utf-8"))
+ assert response["message"] == "Unable to render expression from dataset query."
+
+ uri = (
+ f"api/v1/dataset/{dataset.id}?q=(columns:!(id,metrics.expression))"
+ "&include_rendered_sql=true&multiplier=4"
+ )
+ rv = self.get_assert_metric(uri, "get")
+ assert rv.status_code == 400
+ response = json.loads(rv.data.decode("utf-8"))
+ assert response["message"] == "Unable to render expression from dataset metric."
+
+ uri = (
+ f"api/v1/dataset/{dataset.id}?q=(columns:!(id,columns.expression))"
+ "&include_rendered_sql=true"
+ )
+ rv = self.get_assert_metric(uri, "get")
+ assert rv.status_code == 400
+ response = json.loads(rv.data.decode("utf-8"))
+ assert (
+ response["message"]
+ == "Unable to render expression from dataset calculated column."
+ )
+
+ db.session.delete(dataset)
+ db.session.commit()
+
def test_get_dataset_distinct_schema(self):
"""
Dataset API: Test get dataset distinct schema
diff --git a/tests/integration_tests/db_engine_specs/postgres_tests.py b/tests/integration_tests/db_engine_specs/postgres_tests.py
index e4f9462d63069..a5ef1cdecab59 100644
--- a/tests/integration_tests/db_engine_specs/postgres_tests.py
+++ b/tests/integration_tests/db_engine_specs/postgres_tests.py
@@ -151,12 +151,13 @@ def test_estimate_statement_cost_select_star(self):
DB Eng Specs (postgres): Test estimate_statement_cost select star
"""
+ database = mock.Mock()
cursor = mock.Mock()
cursor.fetchone.return_value = (
"Seq Scan on birth_names (cost=0.00..1537.91 rows=75691 width=46)",
)
sql = "SELECT * FROM birth_names"
- results = PostgresEngineSpec.estimate_statement_cost(sql, cursor)
+ results = PostgresEngineSpec.estimate_statement_cost(database, sql, cursor)
assert results == {"Start-up cost": 0.0, "Total cost": 1537.91}
def test_estimate_statement_invalid_syntax(self):
@@ -165,6 +166,7 @@ def test_estimate_statement_invalid_syntax(self):
"""
from psycopg2 import errors
+ database = mock.Mock()
cursor = mock.Mock()
cursor.execute.side_effect = errors.SyntaxError(
"""
@@ -175,7 +177,7 @@ def test_estimate_statement_invalid_syntax(self):
)
sql = "DROP TABLE birth_names"
with self.assertRaises(errors.SyntaxError):
- PostgresEngineSpec.estimate_statement_cost(sql, cursor)
+ PostgresEngineSpec.estimate_statement_cost(database, sql, cursor)
def test_query_cost_formatter_example_costs(self):
"""
diff --git a/tests/integration_tests/db_engine_specs/presto_tests.py b/tests/integration_tests/db_engine_specs/presto_tests.py
index 798e31ee431a4..94e3ea62721a4 100644
--- a/tests/integration_tests/db_engine_specs/presto_tests.py
+++ b/tests/integration_tests/db_engine_specs/presto_tests.py
@@ -905,22 +905,26 @@ def test_select_star_presto_expand_data(
)
def test_estimate_statement_cost(self):
+ mock_database = mock.MagicMock()
mock_cursor = mock.MagicMock()
estimate_json = {"a": "b"}
mock_cursor.fetchone.return_value = [
'{"a": "b"}',
]
result = PrestoEngineSpec.estimate_statement_cost(
- "SELECT * FROM brth_names", mock_cursor
+ mock_database,
+ "SELECT * FROM brth_names",
+ mock_cursor,
)
assert result == estimate_json
def test_estimate_statement_cost_invalid_syntax(self):
+ mock_database = mock.MagicMock()
mock_cursor = mock.MagicMock()
mock_cursor.execute.side_effect = Exception()
with self.assertRaises(Exception):
PrestoEngineSpec.estimate_statement_cost(
- "DROP TABLE brth_names", mock_cursor
+ mock_database, "DROP TABLE brth_names", mock_cursor
)
def test_get_create_view(self):
diff --git a/tests/integration_tests/fixtures/dashboard_with_tabs.py b/tests/integration_tests/fixtures/dashboard_with_tabs.py
new file mode 100644
index 0000000000000..44f10e1cc29af
--- /dev/null
+++ b/tests/integration_tests/fixtures/dashboard_with_tabs.py
@@ -0,0 +1,651 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+import json
+
+import pytest
+
+from tests.integration_tests.dashboard_utils import create_dashboard
+from tests.integration_tests.test_app import app
+
+MULTIPLE_TABS_TBL_NAME = "multiple_tabs"
+
+
+@pytest.fixture(scope="session")
+def load_multiple_tabs_dashboard():
+ position_json = {
+ "CHART--0GPGmD-pO": {
+ "children": [],
+ "id": "CHART--0GPGmD-pO",
+ "meta": {
+ "chartId": 91,
+ "height": 56,
+ "sliceName": "Current Developers: Is this your first development job?",
+ "sliceNameOverride": "Is this your first development job?",
+ "uuid": "bfe5a8e6-146f-ef59-5e6c-13d519b236a8",
+ "width": 2,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-l_9I0aNYZ",
+ "ROW-b7USYEngT",
+ ],
+ "type": "CHART",
+ },
+ "CHART--w_Br1tPP3": {
+ "children": [],
+ "id": "CHART--w_Br1tPP3",
+ "meta": {
+ "chartId": 85,
+ "height": 51,
+ "sliceName": "\u2708\ufe0f Relocation ability",
+ "uuid": "a6dd2d5a-2cdc-c8ec-f30c-85920f4f8a65",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW-DR80aHJA2c",
+ ],
+ "type": "CHART",
+ },
+ "CHART-0-zzTwBINh": {
+ "children": [],
+ "id": "CHART-0-zzTwBINh",
+ "meta": {
+ "chartId": 72,
+ "height": 55,
+ "sliceName": "Last Year Income Distribution",
+ "uuid": "a2ec5256-94b4-43c4-b8c7-b83f70c5d4df",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-l_9I0aNYZ",
+ "ROW-b7USYEngT",
+ ],
+ "type": "CHART",
+ },
+ "CHART-37fu7fO6Z0": {
+ "children": [],
+ "id": "CHART-37fu7fO6Z0",
+ "meta": {
+ "chartId": 93,
+ "height": 69,
+ "sliceName": "Degrees vs Income",
+ "uuid": "02f546ae-1bf4-bd26-8bc2-14b9279c8a62",
+ "width": 7,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-l_9I0aNYZ",
+ "ROW-kNjtGVFpp",
+ ],
+ "type": "CHART",
+ },
+ "CHART-5QwNlSbXYU": {
+ "children": [],
+ "id": "CHART-5QwNlSbXYU",
+ "meta": {
+ "chartId": 90,
+ "height": 69,
+ "sliceName": "Commute Time",
+ "uuid": "097c05c9-2dd2-481d-813d-d6c0c12b4a3d",
+ "width": 5,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-l_9I0aNYZ",
+ "ROW-kNjtGVFpp",
+ ],
+ "type": "CHART",
+ },
+ "CHART-FKuVqq4kaA": {
+ "children": [],
+ "id": "CHART-FKuVqq4kaA",
+ "meta": {
+ "chartId": 50,
+ "height": 50,
+ "sliceName": "Work Location Preference",
+ "sliceNameOverride": "Work Location Preference",
+ "uuid": "e6b09c28-98cf-785f-4caf-320fd4fca802",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW-DR80aHJA2c",
+ ],
+ "type": "CHART",
+ },
+ "CHART-JnpdZOhVer": {
+ "children": [],
+ "id": "CHART-JnpdZOhVer",
+ "meta": {
+ "chartId": 51,
+ "height": 50,
+ "sliceName": "Highest degree held",
+ "uuid": "9f7d2b9c-6b3a-69f9-f03e-d3a141514639",
+ "width": 2,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW--BIzjz9F0",
+ "COLUMN-IEKAo_QJlz",
+ ],
+ "type": "CHART",
+ },
+ "CHART-LjfhrUkEef": {
+ "children": [],
+ "id": "CHART-LjfhrUkEef",
+ "meta": {
+ "chartId": 86,
+ "height": 68,
+ "sliceName": "First Time Developer & Commute Time",
+ "uuid": "067c4a1e-ae03-4c0c-8e2a-d2c0f4bf43c3",
+ "width": 5,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-l_9I0aNYZ",
+ "ROW-s3l4os7YY",
+ ],
+ "type": "CHART",
+ },
+ "CHART-Q3pbwsH3id": {
+ "children": [],
+ "id": "CHART-Q3pbwsH3id",
+ "meta": {
+ "chartId": 79,
+ "height": 50,
+ "sliceName": "Are you an ethnic minority in your city?",
+ "sliceNameOverride": "Minority Status (in their city)",
+ "uuid": "def07750-b5c0-0b69-6228-cb2330916166",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-mOvr_xWm1",
+ ],
+ "type": "CHART",
+ },
+ "CHART-QVql08s5Bv": {
+ "children": [],
+ "id": "CHART-QVql08s5Bv",
+ "meta": {
+ "chartId": 92,
+ "height": 56,
+ "sliceName": "First Time Developer?",
+ "uuid": "edc75073-8f33-4123-a28d-cd6dfb33cade",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-l_9I0aNYZ",
+ "ROW-b7USYEngT",
+ ],
+ "type": "CHART",
+ },
+ "CHART-UtSaz4pfV6": {
+ "children": [],
+ "id": "CHART-UtSaz4pfV6",
+ "meta": {
+ "chartId": 59,
+ "height": 50,
+ "sliceName": "Age distribution of respondents",
+ "uuid": "5f1ea868-604e-f69d-a241-5daa83ff33be",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-UsW-_RPAb",
+ "COLUMN-OJ5spdMmNh",
+ ],
+ "type": "CHART",
+ },
+ "CHART-VvFbGxi3X_": {
+ "children": [],
+ "id": "CHART-VvFbGxi3X_",
+ "meta": {
+ "chartId": 41,
+ "height": 62,
+ "sliceName": "Top 15 Languages Spoken at Home",
+ "uuid": "03a74c97-52fc-cf87-233c-d4275f8c550c",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-UsW-_RPAb",
+ "COLUMN-OJ5spdMmNh",
+ ],
+ "type": "CHART",
+ },
+ "CHART-XHncHuS5pZ": {
+ "children": [],
+ "id": "CHART-XHncHuS5pZ",
+ "meta": {
+ "chartId": 78,
+ "height": 41,
+ "sliceName": "Number of Aspiring Developers",
+ "sliceNameOverride": "What type of work would you prefer?",
+ "uuid": "a0e5329f-224e-6fc8-efd2-d37d0f546ee8",
+ "width": 2,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW-DR80aHJA2c",
+ ],
+ "type": "CHART",
+ },
+ "CHART-YSzS5GOOLf": {
+ "children": [],
+ "id": "CHART-YSzS5GOOLf",
+ "meta": {
+ "chartId": 49,
+ "height": 54,
+ "sliceName": "Ethnic Minority & Gender",
+ "uuid": "4880e4f4-b701-4be0-86f3-e7e89432e83b",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-mOvr_xWm1",
+ ],
+ "type": "CHART",
+ },
+ "CHART-ZECnzPz8Bi": {
+ "children": [],
+ "id": "CHART-ZECnzPz8Bi",
+ "meta": {
+ "chartId": 70,
+ "height": 74,
+ "sliceName": "Location of Current Developers",
+ "uuid": "5596e0f6-78a9-465d-8325-7139c794a06a",
+ "width": 7,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-l_9I0aNYZ",
+ "ROW-s3l4os7YY",
+ ],
+ "type": "CHART",
+ },
+ "CHART-aytwlT4GAq": {
+ "children": [],
+ "id": "CHART-aytwlT4GAq",
+ "meta": {
+ "chartId": 83,
+ "height": 30,
+ "sliceName": "Breakdown of Developer Type",
+ "uuid": "b8386be8-f44e-6535-378c-2aa2ba461286",
+ "width": 6,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-y-GwJPgxLr",
+ ],
+ "type": "CHART",
+ },
+ "CHART-fLpTSAHpAO": {
+ "children": [],
+ "id": "CHART-fLpTSAHpAO",
+ "meta": {
+ "chartId": 60,
+ "height": 118,
+ "sliceName": "Country of Citizenship",
+ "uuid": "2ba66056-a756-d6a3-aaec-0c243fb7062e",
+ "width": 9,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-UsW-_RPAb",
+ ],
+ "type": "CHART",
+ },
+ "CHART-lQVSAw0Or3": {
+ "children": [],
+ "id": "CHART-lQVSAw0Or3",
+ "meta": {
+ "chartId": 94,
+ "height": 100,
+ "sliceName": "How do you prefer to work?",
+ "sliceNameOverride": "Preferred Employment Style vs Degree",
+ "uuid": "cb8998ab-9f93-4f0f-4e4b-3bfe4b0dea9d",
+ "width": 4,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW--BIzjz9F0",
+ ],
+ "type": "CHART",
+ },
+ "CHART-o-JPAWMZK-": {
+ "children": [],
+ "id": "CHART-o-JPAWMZK-",
+ "meta": {
+ "chartId": 69,
+ "height": 50,
+ "sliceName": "Gender",
+ "uuid": "0f6b447c-828c-e71c-87ac-211bc412b214",
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-mOvr_xWm1",
+ ],
+ "type": "CHART",
+ },
+ "CHART-v22McUFMtx": {
+ "children": [],
+ "id": "CHART-v22McUFMtx",
+ "meta": {
+ "chartId": 71,
+ "height": 52,
+ "sliceName": "How much do you expect to earn? ($0 - 100k)",
+ "sliceNameOverride": "\ud83d\udcb2Expected Income (excluding outliers)",
+ "uuid": "6d0ceb30-2008-d19c-d285-cf77dc764433",
+ "width": 4,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW--BIzjz9F0",
+ "COLUMN-IEKAo_QJlz",
+ ],
+ "type": "CHART",
+ },
+ "CHART-wxWVtlajRF": {
+ "children": [],
+ "id": "CHART-wxWVtlajRF",
+ "meta": {
+ "chartId": 82,
+ "height": 104,
+ "sliceName": "Preferred Employment Style",
+ "uuid": "bff88053-ccc4-92f2-d6f5-de83e950e8cd",
+ "width": 4,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW--BIzjz9F0",
+ ],
+ "type": "CHART",
+ },
+ "COLUMN-IEKAo_QJlz": {
+ "children": ["CHART-JnpdZOhVer", "CHART-v22McUFMtx"],
+ "id": "COLUMN-IEKAo_QJlz",
+ "meta": {"background": "BACKGROUND_TRANSPARENT", "width": 4},
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW--BIzjz9F0",
+ ],
+ "type": "COLUMN",
+ },
+ "COLUMN-OJ5spdMmNh": {
+ "children": ["CHART-VvFbGxi3X_", "CHART-UtSaz4pfV6"],
+ "id": "COLUMN-OJ5spdMmNh",
+ "meta": {"background": "BACKGROUND_TRANSPARENT", "width": 3},
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-UsW-_RPAb",
+ ],
+ "type": "COLUMN",
+ },
+ "DASHBOARD_VERSION_KEY": "v2",
+ "GRID_ID": {
+ "children": ["TABS-L-d9eyOE-b"],
+ "id": "GRID_ID",
+ "parents": ["ROOT_ID"],
+ "type": "GRID",
+ },
+ "HEADER_ID": {
+ "id": "HEADER_ID",
+ "meta": {"text": "FCC New Coder Survey 2018"},
+ "type": "HEADER",
+ },
+ "MARKDOWN-BUmyHM2s0x": {
+ "children": [],
+ "id": "MARKDOWN-BUmyHM2s0x",
+ "meta": {
+ "code": "# Aspiring Developers\n\nThe mission of FreeCodeCamp is to \"help people learn to code for free\". With this in mind, it's no surprise that ~83% of this survey's respondents fall into the **Aspiring Developer** category.\n\nIn this tab, we use visualization to explore:\n\n- Interest in relocating for work\n- Preferences around work location & style\n- Distribution of expected income\n- Distribution of highest degree held\n- Heatmap of highest degree held vs employment style preference",
+ "height": 50,
+ "width": 4,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-YT6eNksV-",
+ "ROW-DR80aHJA2c",
+ ],
+ "type": "MARKDOWN",
+ },
+ "MARKDOWN-NQmSPDOtpl": {
+ "children": [],
+ "id": "MARKDOWN-NQmSPDOtpl",
+ "meta": {
+ "code": "# Current Developers\n\nWhile majority of the students on FCC are Aspiring developers, there's a nontrivial minority that's there to continue leveling up their skills (17% of the survey respondents).\n\nBased on how respondents self-identified in the start of the survey, they were asked different questions. In this tab, we use visualizations to explore:\n\n- The buckets of commute team these developers encounter\n- The proportion of developers whose current job is their first developer job\n- Distribution of last year's income\n- The geographic distribution of these developers\n- The overlap between commute time and if their current job is their first developer job\n- Potential link between highest degree earned and last year's income",
+ "height": 56,
+ "width": 4,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-l_9I0aNYZ",
+ "ROW-b7USYEngT",
+ ],
+ "type": "MARKDOWN",
+ },
+ "MARKDOWN-__u6CsUyfh": {
+ "children": [],
+ "id": "MARKDOWN-__u6CsUyfh",
+ "meta": {
+ "code": "## FreeCodeCamp New Coder Survey 2018\n\nEvery year, FCC surveys its user base (mostly budding software developers) to learn more about their interests, backgrounds, goals, job status, and socioeconomic features. This dashboard visualizes survey data from the 2018 survey.\n\n- [Survey link](https://freecodecamp.typeform.com/to/S3UeD9)\n- [Dataset](https://github.com/freeCodeCamp/2018-new-coder-survey)\n- [FCC Blog Post](https://www.freecodecamp.org/news/we-asked-20-000-people-who-they-are-and-how-theyre-learning-to-code-fff5d668969/)",
+ "height": 30,
+ "width": 6,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-y-GwJPgxLr",
+ ],
+ "type": "MARKDOWN",
+ },
+ "MARKDOWN-zc2mWxZeox": {
+ "children": [],
+ "id": "MARKDOWN-zc2mWxZeox",
+ "meta": {
+ "code": "# Demographics\n\nFreeCodeCamp is a completely-online community of people learning to code and consists of aspiring & current developers from all over the world. That doesn't necessarily mean that access to these types of opportunities are evenly distributed. \n\nThe following charts can begin to help us understand:\n\n- the original citizenship of the survey respondents\n- minority representation among both aspiring and current developers\n- their age distribution\n- household languages",
+ "height": 52,
+ "width": 3,
+ },
+ "parents": [
+ "ROOT_ID",
+ "GRID_ID",
+ "TABS-L-d9eyOE-b",
+ "TAB-AsMaxdYL_t",
+ "ROW-mOvr_xWm1",
+ ],
+ "type": "MARKDOWN",
+ },
+ "ROOT_ID": {"children": ["GRID_ID"], "id": "ROOT_ID", "type": "ROOT"},
+ "ROW--BIzjz9F0": {
+ "children": ["COLUMN-IEKAo_QJlz", "CHART-lQVSAw0Or3", "CHART-wxWVtlajRF"],
+ "id": "ROW--BIzjz9F0",
+ "meta": {"background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b", "TAB-YT6eNksV-"],
+ "type": "ROW",
+ },
+ "ROW-DR80aHJA2c": {
+ "children": [
+ "MARKDOWN-BUmyHM2s0x",
+ "CHART-XHncHuS5pZ",
+ "CHART--w_Br1tPP3",
+ "CHART-FKuVqq4kaA",
+ ],
+ "id": "ROW-DR80aHJA2c",
+ "meta": {"background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b", "TAB-YT6eNksV-"],
+ "type": "ROW",
+ },
+ "ROW-UsW-_RPAb": {
+ "children": ["COLUMN-OJ5spdMmNh", "CHART-fLpTSAHpAO"],
+ "id": "ROW-UsW-_RPAb",
+ "meta": {"background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b", "TAB-AsMaxdYL_t"],
+ "type": "ROW",
+ },
+ "ROW-b7USYEngT": {
+ "children": [
+ "MARKDOWN-NQmSPDOtpl",
+ "CHART--0GPGmD-pO",
+ "CHART-QVql08s5Bv",
+ "CHART-0-zzTwBINh",
+ ],
+ "id": "ROW-b7USYEngT",
+ "meta": {"background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b", "TAB-l_9I0aNYZ"],
+ "type": "ROW",
+ },
+ "ROW-kNjtGVFpp": {
+ "children": ["CHART-5QwNlSbXYU", "CHART-37fu7fO6Z0"],
+ "id": "ROW-kNjtGVFpp",
+ "meta": {"background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b", "TAB-l_9I0aNYZ"],
+ "type": "ROW",
+ },
+ "ROW-mOvr_xWm1": {
+ "children": [
+ "MARKDOWN-zc2mWxZeox",
+ "CHART-Q3pbwsH3id",
+ "CHART-o-JPAWMZK-",
+ "CHART-YSzS5GOOLf",
+ ],
+ "id": "ROW-mOvr_xWm1",
+ "meta": {"background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b", "TAB-AsMaxdYL_t"],
+ "type": "ROW",
+ },
+ "ROW-s3l4os7YY": {
+ "children": ["CHART-LjfhrUkEef", "CHART-ZECnzPz8Bi"],
+ "id": "ROW-s3l4os7YY",
+ "meta": {"background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b", "TAB-l_9I0aNYZ"],
+ "type": "ROW",
+ },
+ "ROW-y-GwJPgxLr": {
+ "children": ["MARKDOWN-__u6CsUyfh", "CHART-aytwlT4GAq"],
+ "id": "ROW-y-GwJPgxLr",
+ "meta": {"background": "BACKGROUND_TRANSPARENT"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b", "TAB-AsMaxdYL_t"],
+ "type": "ROW",
+ },
+ "TAB-AsMaxdYL_t": {
+ "children": ["ROW-y-GwJPgxLr", "ROW-mOvr_xWm1", "ROW-UsW-_RPAb"],
+ "id": "TAB-AsMaxdYL_t",
+ "meta": {"text": "Overview"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b"],
+ "type": "TAB",
+ },
+ "TAB-YT6eNksV-": {
+ "children": ["ROW-DR80aHJA2c", "ROW--BIzjz9F0"],
+ "id": "TAB-YT6eNksV-",
+ "meta": {"text": "\ud83d\ude80 Aspiring Developers"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b"],
+ "type": "TAB",
+ },
+ "TAB-l_9I0aNYZ": {
+ "children": ["ROW-b7USYEngT", "ROW-kNjtGVFpp", "ROW-s3l4os7YY"],
+ "id": "TAB-l_9I0aNYZ",
+ "meta": {"text": "\ud83d\udcbb Current Developers"},
+ "parents": ["ROOT_ID", "GRID_ID", "TABS-L-d9eyOE-b"],
+ "type": "TAB",
+ },
+ "TABS-L-d9eyOE-b": {
+ "children": ["TAB-AsMaxdYL_t", "TAB-YT6eNksV-", "TAB-l_9I0aNYZ"],
+ "id": "TABS-L-d9eyOE-b",
+ "meta": {},
+ "parents": ["ROOT_ID", "GRID_ID"],
+ "type": "TABS",
+ },
+ }
+
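+    # Create the multi-tab dashboard under the "multi_tabs_test" slug so the
+    # report tests below can look it up.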
+ with app.app_context():
+ dash = create_dashboard(
+ "multi_tabs_test", "multiple tabs Test", json.dumps(position_json), None
+ )
+ yield dash
diff --git a/tests/integration_tests/reports/api_tests.py b/tests/integration_tests/reports/api_tests.py
index 7664dc4584e0e..55b333b8137a1 100644
--- a/tests/integration_tests/reports/api_tests.py
+++ b/tests/integration_tests/reports/api_tests.py
@@ -49,6 +49,9 @@
load_birth_names_dashboard_with_slices, # noqa: F401
load_birth_names_data, # noqa: F401
)
+from tests.integration_tests.fixtures.dashboard_with_tabs import (
+ load_mutltiple_tabs_dashboard, # noqa: F401
+)
from tests.integration_tests.reports.utils import insert_report_schedule
REPORTS_COUNT = 10
@@ -1972,3 +1975,79 @@ def test_report_schedule_logs_no_mutations(self):
assert rv.status_code == 405
rv = self.client.delete(uri)
assert rv.status_code == 405
+
+ @with_feature_flags(ALERT_REPORT_TABS=True)
+ @pytest.mark.usefixtures(
+ "load_birth_names_dashboard_with_slices", "create_report_schedules"
+ )
+ def test_create_report_schedule_with_invalid_anchors(self):
+ """
+        ReportSchedule Api: Test that creating a report schedule with tab
+        anchors that do not exist on the dashboard fails validation
+ """
+        dashboard = db.session.query(Dashboard).first()
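+        # These tab anchors belong to the multi-tab fixture dashboard, not to
+        # this one, so the API should reject them with a validation error.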
+ anchors = ["TAB-AsMaxdYL_t", "TAB-YT6eNksV-", "TAB-l_9I0aNYZ"]
+ report_schedule_data = {
+ "type": ReportScheduleType.REPORT,
+ "name": "random_name1",
+ "description": "description",
+ "creation_method": ReportCreationMethod.ALERTS_REPORTS,
+ "crontab": "0 9 * * *",
+ "working_timeout": 3600,
+ "dashboard": report_schedule.id,
+ "extra": {"dashboard": {"anchor": json.dumps(anchors)}},
+ }
+
+ self.login(ADMIN_USERNAME)
+ uri = "api/v1/report/"
+ rv = self.post_assert_metric(uri, report_schedule_data, "post")
+ data = json.loads(rv.data.decode("utf-8"))
+ assert rv.status_code == 422
+ assert "message" in data
+ assert "extra" in data["message"]
+        assert all(anchor in data["message"]["extra"][0] for anchor in anchors)
+
+ @with_feature_flags(ALERT_REPORT_TABS=True)
+ @pytest.mark.usefixtures("load_mutltiple_tabs_dashboard", "create_report_schedules")
+ def test_create_report_schedule_with_multiple_anchors(self):
+ """
+        ReportSchedule Api: Test creating a report schedule anchored to every dashboard tab
+ """
+ report_dashboard = (
+ db.session.query(Dashboard)
+ .filter(Dashboard.slug == "multi_tabs_test")
+ .first()
+ )
+
+ self.login(ADMIN_USERNAME)
+ tabs_uri = f"/api/v1/dashboard/{report_dashboard.id}/tabs"
+ rv = self.client.get(tabs_uri)
+ data = json.loads(rv.data.decode("utf-8"))
+
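+        # Anchor the report to every tab ID exposed by the dashboard tabs endpoint.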
+ tabs_keys = list(data.get("result").get("all_tabs").keys())
+ extra_json = {"dashboard": {"anchor": json.dumps(tabs_keys)}}
+
+ report_schedule_data = {
+ "type": ReportScheduleType.REPORT,
+ "name": "random_name2",
+ "description": "description",
+ "creation_method": ReportCreationMethod.ALERTS_REPORTS,
+ "crontab": "0 9 * * *",
+ "working_timeout": 3600,
+ "dashboard": report_dashboard.id,
+ "extra": extra_json,
+ }
+
+ uri = "api/v1/report/"
+ rv = self.post_assert_metric(uri, report_schedule_data, "post")
+ data = json.loads(rv.data.decode("utf-8"))
+ assert rv.status_code == 201
+
+ report_schedule = (
+ db.session.query(ReportSchedule)
+ .filter(ReportSchedule.dashboard_id == report_dashboard.id)
+ .first()
+ )
+
+ assert json.loads(report_schedule.extra_json) == extra_json
diff --git a/tests/integration_tests/reports/commands_tests.py b/tests/integration_tests/reports/commands_tests.py
index 575c8a02b9d46..f18d454bb578f 100644
--- a/tests/integration_tests/reports/commands_tests.py
+++ b/tests/integration_tests/reports/commands_tests.py
@@ -1760,6 +1760,7 @@ def test_email_dashboard_report_fails_uncaught_exception(
screenshot_mock.return_value = SCREENSHOT_FILE
email_mock.side_effect = Exception("Uncaught exception")
+ app.config["EMAIL_REPORTS_CTA"] = "Call to action"
with pytest.raises(Exception):
AsyncExecuteReportScheduleCommand(
@@ -1767,6 +1768,11 @@ def test_email_dashboard_report_fails_uncaught_exception(
).run()
assert_log(ReportState.ERROR, error_message="Uncaught exception")
+ assert (
+ 'Call to action ' in email_mock.call_args[0][2]
+ )
@pytest.mark.usefixtures(
diff --git a/tests/integration_tests/sqla_models_tests.py b/tests/integration_tests/sqla_models_tests.py
index 2d7f6bf041bdd..d4ca3bc1c1a47 100644
--- a/tests/integration_tests/sqla_models_tests.py
+++ b/tests/integration_tests/sqla_models_tests.py
@@ -15,11 +15,13 @@
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
+from __future__ import annotations
+
import re
from datetime import datetime
-from typing import Any, NamedTuple, Optional, Union
+from typing import Any, Literal, NamedTuple, Optional, Union
from re import Pattern
-from unittest.mock import patch
+from unittest.mock import Mock, patch
import pytest
import numpy as np
@@ -911,6 +913,101 @@ def test_extra_cache_keys_in_sql_expression(
assert extra_cache_keys == expected_cache_keys
+@pytest.mark.usefixtures("app_context")
+@pytest.mark.parametrize(
+ "sql_expression,expected_cache_keys,has_extra_cache_keys,item_type",
+ [
+ ("'{{ current_username() }}'", ["abc"], True, "columns"),
+ ("(user != 'abc')", [], False, "columns"),
+ ("{{ current_user_id() }}", [1], True, "metrics"),
+ ("COUNT(*)", [], False, "metrics"),
+ ],
+)
+@patch("superset.jinja_context.get_user_id", return_value=1)
+@patch("superset.jinja_context.get_username", return_value="abc")
+def test_extra_cache_keys_in_adhoc_metrics_and_columns(
+ mock_username: Mock,
+ mock_user_id: Mock,
+ sql_expression: str,
+    expected_cache_keys: list[Any],
+ has_extra_cache_keys: bool,
+ item_type: Literal["columns", "metrics"],
+):
+ table = SqlaTable(
+ table_name="test_has_no_extra_cache_keys_table",
+ sql="SELECT 'abc' as user",
+ database=get_example_database(),
+ )
+ base_query_obj: dict[str, Any] = {
+ "granularity": None,
+ "from_dttm": None,
+ "to_dttm": None,
+ "groupby": [],
+ "metrics": [],
+ "columns": [],
+ "is_timeseries": False,
+ "filter": [],
+ }
+
+ items: dict[str, Any] = {
+ item_type: [
+ {
+ "label": None,
+ "expressionType": "SQL",
+ "sqlExpression": sql_expression,
+ }
+ ],
+ }
+
+ query_obj = {**base_query_obj, **items}
+
+ extra_cache_keys = table.get_extra_cache_keys(query_obj)
+ assert table.has_extra_cache_key_calls(query_obj) == has_extra_cache_keys
+ assert extra_cache_keys == expected_cache_keys
+
+
+@pytest.mark.usefixtures("app_context")
+@patch("superset.jinja_context.get_user_id", return_value=1)
+@patch("superset.jinja_context.get_username", return_value="abc")
+def test_extra_cache_keys_in_dataset_metrics_and_columns(
+ mock_username: Mock,
+ mock_user_id: Mock,
+):
+ table = SqlaTable(
+ table_name="test_has_no_extra_cache_keys_table",
+ sql="SELECT 'abc' as user",
+ database=get_example_database(),
+ columns=[
+ TableColumn(column_name="user", type="VARCHAR(255)"),
+ TableColumn(
+ column_name="username",
+ type="VARCHAR(255)",
+ expression="{{ current_username() }}",
+ ),
+ ],
+ metrics=[
+ SqlMetric(
+ metric_name="variable_profit",
+ expression="SUM(price) * {{ url_param('multiplier') }}",
+ ),
+ ],
+ )
+ query_obj: dict[str, Any] = {
+ "granularity": None,
+ "from_dttm": None,
+ "to_dttm": None,
+ "groupby": [],
+ "columns": ["username"],
+ "metrics": ["variable_profit"],
+ "is_timeseries": False,
+ "filter": [],
+ }
+
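+    # The Jinja calls in the dataset's column/metric expressions should surface
+    # as extra cache keys: current_username() -> "abc", url_param() -> None.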
+ extra_cache_keys = table.get_extra_cache_keys(query_obj)
+ assert table.has_extra_cache_key_calls(query_obj) is True
+ assert set(extra_cache_keys) == {"abc", None}
+
+
@pytest.mark.usefixtures("app_context")
@pytest.mark.parametrize(
"row,dimension,result",
diff --git a/tests/unit_tests/commands/report/execute_test.py b/tests/unit_tests/commands/report/execute_test.py
index b7b545fd4a6e5..3d49bb0457d06 100644
--- a/tests/unit_tests/commands/report/execute_test.py
+++ b/tests/unit_tests/commands/report/execute_test.py
@@ -15,15 +15,21 @@
# specific language governing permissions and limitations
# under the License.
+import json
+from unittest.mock import patch
+
+import pytest
from pytest_mock import MockerFixture
from superset.commands.report.execute import BaseReportState
+from superset.dashboards.permalink.types import DashboardPermalinkState
from superset.reports.models import (
ReportRecipientType,
ReportSchedule,
ReportSourceFormat,
)
from superset.utils.core import HeaderDataType
+from tests.integration_tests.conftest import with_feature_flags
def test_log_data_with_chart(mocker: MockerFixture) -> None:
@@ -220,3 +226,142 @@ def test_log_data_with_missing_values(mocker: MockerFixture) -> None:
}
assert result == expected_result
+
+
+@pytest.mark.parametrize(
+ "anchors, permalink_side_effect, expected_uris",
+ [
+        # User selects multiple tabs to export in a dashboard report
+ (
+ ["mock_tab_anchor_1", "mock_tab_anchor_2"],
+ ["url1", "url2"],
+ [
+ "http://0.0.0.0:8080/superset/dashboard/p/url1/",
+ "http://0.0.0.0:8080/superset/dashboard/p/url2/",
+ ],
+ ),
+        # User selects a single tab to export in a dashboard report
+ (
+ "mock_tab_anchor_1",
+ ["url1"],
+ ["http://0.0.0.0:8080/superset/dashboard/p/url1/"],
+ ),
+ ],
+)
+@patch(
+ "superset.commands.dashboard.permalink.create.CreateDashboardPermalinkCommand.run"
+)
+@with_feature_flags(ALERT_REPORT_TABS=True)
+def test_get_dashboard_urls_with_multiple_tabs(
+ mock_run, mocker: MockerFixture, anchors, permalink_side_effect, expected_uris
+) -> None:
+ mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule)
+ mock_report_schedule.chart = False
+ mock_report_schedule.chart_id = None
+ mock_report_schedule.dashboard_id = 123
+ mock_report_schedule.type = "report_type"
+ mock_report_schedule.report_format = "report_format"
+ mock_report_schedule.owners = [1, 2]
+ mock_report_schedule.recipients = []
+ mock_report_schedule.extra = {
+ "dashboard": {
+ "anchor": json.dumps(anchors) if isinstance(anchors, list) else anchors,
+ "dataMask": None,
+ "activeTabs": None,
+ "urlParams": None,
+ }
+ }
+
+ class_instance: BaseReportState = BaseReportState(
+ mock_report_schedule, "January 1, 2021", "execution_id_example"
+ )
+ class_instance._report_schedule = mock_report_schedule
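+    # Each permalink key returned by CreateDashboardPermalinkCommand.run maps
+    # to one tab; get_dashboard_urls should expand them into permalink URLs.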
+ mock_run.side_effect = permalink_side_effect
+
+ result: list[str] = class_instance.get_dashboard_urls()
+
+ assert result == expected_uris
+
+
+@patch(
+ "superset.commands.dashboard.permalink.create.CreateDashboardPermalinkCommand.run"
+)
+@with_feature_flags(ALERT_REPORT_TABS=True)
+def test_get_dashboard_urls_with_exporting_dashboard_only(
+ mock_run,
+ mocker: MockerFixture,
+) -> None:
+ mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule)
+ mock_report_schedule.chart = False
+ mock_report_schedule.chart_id = None
+ mock_report_schedule.dashboard_id = 123
+ mock_report_schedule.type = "report_type"
+ mock_report_schedule.report_format = "report_format"
+ mock_report_schedule.owners = [1, 2]
+ mock_report_schedule.recipients = []
+ mock_report_schedule.extra = {
+ "dashboard": {
+ "anchor": "",
+ "dataMask": None,
+ "activeTabs": None,
+ "urlParams": None,
+ }
+ }
+ mock_run.return_value = "url1"
+
+ class_instance: BaseReportState = BaseReportState(
+ mock_report_schedule, "January 1, 2021", "execution_id_example"
+ )
+ class_instance._report_schedule = mock_report_schedule
+
+ result: list[str] = class_instance.get_dashboard_urls()
+
+ assert "http://0.0.0.0:8080/superset/dashboard/p/url1/" == result[0]
+
+
+@patch(
+ "superset.commands.dashboard.permalink.create.CreateDashboardPermalinkCommand.run"
+)
+def test_get_tab_urls(
+ mock_run,
+ mocker: MockerFixture,
+) -> None:
+ mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule)
+ mock_report_schedule.dashboard_id = 123
+
+ class_instance: BaseReportState = BaseReportState(
+ mock_report_schedule, "January 1, 2021", "execution_id_example"
+ )
+ class_instance._report_schedule = mock_report_schedule
+ mock_run.side_effect = ["uri1", "uri2"]
+ tab_anchors = ["1", "2"]
+ result: list[str] = class_instance._get_tabs_urls(tab_anchors)
+ assert result == [
+ "http://0.0.0.0:8080/superset/dashboard/p/uri1/",
+ "http://0.0.0.0:8080/superset/dashboard/p/uri2/",
+ ]
+
+
+@patch(
+ "superset.commands.dashboard.permalink.create.CreateDashboardPermalinkCommand.run"
+)
+def test_get_tab_url(
+ mock_run,
+ mocker: MockerFixture,
+) -> None:
+ mock_report_schedule: ReportSchedule = mocker.Mock(spec=ReportSchedule)
+ mock_report_schedule.dashboard_id = 123
+
+ class_instance: BaseReportState = BaseReportState(
+ mock_report_schedule, "January 1, 2021", "execution_id_example"
+ )
+ class_instance._report_schedule = mock_report_schedule
+ mock_run.return_value = "uri"
+ dashboard_state = DashboardPermalinkState(
+ anchor="1",
+ dataMask=None,
+ activeTabs=None,
+ urlParams=None,
+ )
+ result: str = class_instance._get_tab_url(dashboard_state)
+ assert result == "http://0.0.0.0:8080/superset/dashboard/p/uri/"
diff --git a/tests/unit_tests/db_engine_specs/test_denodo.py b/tests/unit_tests/db_engine_specs/test_denodo.py
new file mode 100644
index 0000000000000..31e9c0dea0d96
--- /dev/null
+++ b/tests/unit_tests/db_engine_specs/test_denodo.py
@@ -0,0 +1,146 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+from datetime import datetime
+from typing import Any, Optional
+
+import pytest
+from sqlalchemy import column, types
+from sqlalchemy.engine.url import make_url
+
+from superset.db_engine_specs.denodo import DenodoEngineSpec as spec
+from superset.utils.core import GenericDataType
+from tests.unit_tests.db_engine_specs.utils import (
+ assert_column_spec,
+ assert_convert_dttm,
+)
+from tests.unit_tests.fixtures.common import dttm # noqa: F401
+
+
+@pytest.mark.parametrize(
+ "target_type,expected_result",
+ [
+ ("Date", "TO_DATE('yyyy-MM-dd', '2019-01-02')"),
+ (
+ "DateTime",
+ "TO_TIMESTAMP('yyyy-MM-dd HH:mm:ss.SSS', '2019-01-02 03:04:05.678')",
+ ),
+ (
+ "TimeStamp",
+ "TO_TIMESTAMP('yyyy-MM-dd HH:mm:ss.SSS', '2019-01-02 03:04:05.678')",
+ ),
+ ("UnknownType", None),
+ ],
+)
+def test_convert_dttm(
+ target_type: str,
+ expected_result: Optional[str],
+ dttm: datetime, # noqa: F811
+) -> None:
+ assert_convert_dttm(spec, target_type, expected_result, dttm)
+
+
+def test_epoch_to_dttm(
+ dttm: datetime, # noqa: F811
+) -> None:
+ assert isinstance(dttm, datetime)
+ assert (
+ spec.epoch_to_dttm().format(col="epoch_dttm") == "GETTIMEFROMMILLIS(epoch_dttm)"
+ )
+
+
+@pytest.mark.parametrize(
+ "native_type,sqla_type,attrs,generic_type,is_dttm",
+ [
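+        # Numeric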
+ ("SMALLINT", types.SmallInteger, None, GenericDataType.NUMERIC, False),
+ ("INTEGER", types.Integer, None, GenericDataType.NUMERIC, False),
+ ("BIGINT", types.BigInteger, None, GenericDataType.NUMERIC, False),
+ ("DECIMAL", types.Numeric, None, GenericDataType.NUMERIC, False),
+ ("NUMERIC", types.Numeric, None, GenericDataType.NUMERIC, False),
+ ("REAL", types.REAL, None, GenericDataType.NUMERIC, False),
+ ("MONEY", types.Numeric, None, GenericDataType.NUMERIC, False),
+ # String
+ ("CHAR", types.String, None, GenericDataType.STRING, False),
+ ("VARCHAR", types.String, None, GenericDataType.STRING, False),
+ ("TEXT", types.String, None, GenericDataType.STRING, False),
+ # Temporal
+ ("DATE", types.Date, None, GenericDataType.TEMPORAL, True),
+ ("TIMESTAMP", types.TIMESTAMP, None, GenericDataType.TEMPORAL, True),
+ ("TIME", types.Time, None, GenericDataType.TEMPORAL, True),
+ # Boolean
+ ("BOOLEAN", types.Boolean, None, GenericDataType.BOOLEAN, False),
+ ],
+)
+def test_get_column_spec(
+ native_type: str,
+ sqla_type: type[types.TypeEngine],
+ attrs: Optional[dict[str, Any]],
+ generic_type: GenericDataType,
+ is_dttm: bool,
+) -> None:
+ assert_column_spec(spec, native_type, sqla_type, attrs, generic_type, is_dttm)
+
+
+def test_get_schema_from_engine_params() -> None:
+ """
+ Test the ``get_schema_from_engine_params`` method.
+ Should return None.
+ """
+
+ assert (
+ spec.get_schema_from_engine_params(
+ make_url("denodo://user:password@host/db"), {}
+ )
+ is None
+ )
+
+
+def test_get_default_catalog() -> None:
+ """
+ Test ``get_default_catalog``.
+ Should return None.
+ """
+ from superset.models.core import Database
+
+ database = Database(
+ database_name="denodo",
+ sqlalchemy_uri="denodo://user:password@host:9996/db",
+ )
+ assert spec.get_default_catalog(database) is None
+
+
+@pytest.mark.parametrize(
+ "time_grain,expected_result",
+ [
+ (None, "col"),
+ ("PT1M", "TRUNC(col,'MI')"),
+ ("PT1H", "TRUNC(col,'HH')"),
+ ("P1D", "TRUNC(col,'DDD')"),
+ ("P1W", "TRUNC(col,'W')"),
+ ("P1M", "TRUNC(col,'MONTH')"),
+ ("P3M", "TRUNC(col,'Q')"),
+ ("P1Y", "TRUNC(col,'YEAR')"),
+ ],
+)
+def test_timegrain_expressions(time_grain: str, expected_result: str) -> None:
+ """
+ DB Eng Specs (denodo): Test time grain expressions
+ """
+ actual = str(
+ spec.get_timestamp_expr(col=column("col"), pdf=None, time_grain=time_grain)
+ )
+ assert actual == expected_result
diff --git a/tests/unit_tests/db_engine_specs/test_gsheets.py b/tests/unit_tests/db_engine_specs/test_gsheets.py
index 5d2ddb807bbc1..4e17054db9e63 100644
--- a/tests/unit_tests/db_engine_specs/test_gsheets.py
+++ b/tests/unit_tests/db_engine_specs/test_gsheets.py
@@ -559,6 +559,7 @@ def oauth2_config() -> OAuth2ClientConfig:
"redirect_uri": "http://localhost:8088/api/v1/oauth2/",
"authorization_request_uri": "https://accounts.google.com/o/oauth2/v2/auth",
"token_request_uri": "https://oauth2.googleapis.com/token",
+ "request_content_type": "json",
}
diff --git a/tests/unit_tests/db_engine_specs/test_impala.py b/tests/unit_tests/db_engine_specs/test_impala.py
index efaed81cba7ad..543db243684c5 100644
--- a/tests/unit_tests/db_engine_specs/test_impala.py
+++ b/tests/unit_tests/db_engine_specs/test_impala.py
@@ -17,9 +17,13 @@
from datetime import datetime
from typing import Optional
+from unittest.mock import Mock, patch
import pytest
+from superset.db_engine_specs.impala import ImpalaEngineSpec as spec
+from superset.models.core import Database
+from superset.models.sql_lab import Query
from tests.unit_tests.db_engine_specs.utils import assert_convert_dttm
from tests.unit_tests.fixtures.common import dttm # noqa: F401
@@ -37,6 +41,77 @@ def test_convert_dttm(
expected_result: Optional[str],
dttm: datetime, # noqa: F811
) -> None:
- from superset.db_engine_specs.impala import ImpalaEngineSpec as spec
-
assert_convert_dttm(spec, target_type, expected_result, dttm)
+
+
+def test_get_cancel_query_id() -> None:
+ query = Query()
+
+ cursor_mock = Mock()
+ last_operation_mock = Mock()
+ cursor_mock._last_operation = last_operation_mock
+
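+    # The GUID is stored little-endian; get_cancel_query_id should decode it
+    # into Impala's canonical "hi:lo" hex query ID.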
+ guid = bytes(reversed(bytes.fromhex("9fbdba20000000006940643a2731718b")))
+ last_operation_mock.handle.operationId.guid = guid
+
+ assert (
+ spec.get_cancel_query_id(cursor_mock, query)
+ == "6940643a2731718b:9fbdba2000000000"
+ )
+
+
+@patch("requests.post")
+def test_cancel_query(post_mock: Mock) -> None:
+ query = Query()
+ database = Database(
+ database_name="test_impala", sqlalchemy_uri="impala://localhost:21050/default"
+ )
+ query.database = database
+
+ response_mock = Mock()
+ response_mock.status_code = 200
+ post_mock.return_value = response_mock
+
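+    # A 200 from Impala's HTTP debug endpoint signals a successful cancellation.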
+ result = spec.cancel_query(None, query, "6940643a2731718b:9fbdba2000000000")
+
+ post_mock.assert_called_once_with(
+ "http://localhost:25000/cancel_query?query_id=6940643a2731718b:9fbdba2000000000",
+ timeout=3,
+ )
+ assert result is True
+
+
+@patch("requests.post")
+def test_cancel_query_failed(post_mock: Mock) -> None:
+ query = Query()
+ database = Database(
+ database_name="test_impala", sqlalchemy_uri="impala://localhost:21050/default"
+ )
+ query.database = database
+
+ response_mock = Mock()
+ response_mock.status_code = 500
+ post_mock.return_value = response_mock
+
+ result = spec.cancel_query(None, query, "6940643a2731718b:9fbdba2000000000")
+
+ post_mock.assert_called_once_with(
+ "http://localhost:25000/cancel_query?query_id=6940643a2731718b:9fbdba2000000000",
+ timeout=3,
+ )
+ assert result is False
+
+
+@patch("requests.post")
+def test_cancel_query_exception(post_mock: Mock) -> None:
+ query = Query()
+ database = Database(
+ database_name="test_impala", sqlalchemy_uri="impala://localhost:21050/default"
+ )
+ query.database = database
+
+ post_mock.side_effect = Exception("Network error")
+
+ result = spec.cancel_query(None, query, "6940643a2731718b:9fbdba2000000000")
+
+ assert result is False
diff --git a/tests/unit_tests/db_engine_specs/test_mssql.py b/tests/unit_tests/db_engine_specs/test_mssql.py
index 38a5603e4ec93..0a3760a47f1fa 100644
--- a/tests/unit_tests/db_engine_specs/test_mssql.py
+++ b/tests/unit_tests/db_engine_specs/test_mssql.py
@@ -27,6 +27,7 @@
from sqlalchemy.types import String, TypeEngine, UnicodeText
from superset.errors import ErrorLevel, SupersetError, SupersetErrorType
+from superset.models.sql_types.mssql_sql_types import GUID
from superset.utils.core import GenericDataType
from tests.unit_tests.db_engine_specs.utils import (
assert_column_spec,
@@ -46,6 +47,7 @@
("NCHAR(10)", UnicodeText, None, GenericDataType.STRING, False),
("NVARCHAR(10)", UnicodeText, None, GenericDataType.STRING, False),
("NTEXT", UnicodeText, None, GenericDataType.STRING, False),
+ ("uniqueidentifier", GUID, None, GenericDataType.STRING, False),
],
)
def test_get_column_spec(
diff --git a/tests/unit_tests/db_engine_specs/test_trino.py b/tests/unit_tests/db_engine_specs/test_trino.py
index 5a32cd05044cd..b616adfcf139e 100644
--- a/tests/unit_tests/db_engine_specs/test_trino.py
+++ b/tests/unit_tests/db_engine_specs/test_trino.py
@@ -45,7 +45,12 @@
SupersetDBAPIProgrammingError,
)
from superset.sql_parse import Table
-from superset.superset_typing import ResultSetColumnType, SQLAColumnType, SQLType
+from superset.superset_typing import (
+ OAuth2ClientConfig,
+ ResultSetColumnType,
+ SQLAColumnType,
+ SQLType,
+)
from superset.utils import json
from superset.utils.core import GenericDataType
from tests.unit_tests.db_engine_specs.utils import (
@@ -421,21 +426,23 @@ def test_execute_with_cursor_in_parallel(app, mocker: MockerFixture):
def _mock_execute(*args, **kwargs):
mock_cursor.query_id = query_id
- mock_cursor.execute.side_effect = _mock_execute
- with patch.dict(
- "superset.config.DISALLOWED_SQL_FUNCTIONS",
- {},
- clear=True,
- ):
- TrinoEngineSpec.execute_with_cursor(
- cursor=mock_cursor,
- sql="SELECT 1 FROM foo",
- query=mock_query,
- )
+ with app.test_request_context("/some/place/"):
+ mock_cursor.execute.side_effect = _mock_execute
- mock_query.set_extra_json_key.assert_called_once_with(
- key=QUERY_CANCEL_KEY, value=query_id
- )
+ with patch.dict(
+ "superset.config.DISALLOWED_SQL_FUNCTIONS",
+ {},
+ clear=True,
+ ):
+ TrinoEngineSpec.execute_with_cursor(
+ cursor=mock_cursor,
+ sql="SELECT 1 FROM foo",
+ query=mock_query,
+ )
+
+ mock_query.set_extra_json_key.assert_called_once_with(
+ key=QUERY_CANCEL_KEY, value=query_id
+ )
def test_execute_with_cursor_app_context(app, mocker: MockerFixture):
@@ -446,23 +453,25 @@ def test_execute_with_cursor_app_context(app, mocker: MockerFixture):
mock_cursor.query_id = None
mock_query = mocker.MagicMock()
- g.some_value = "some_value"
def _mock_execute(*args, **kwargs):
assert has_app_context()
assert g.some_value == "some_value"
- with patch.object(TrinoEngineSpec, "execute", side_effect=_mock_execute):
- with patch.dict(
- "superset.config.DISALLOWED_SQL_FUNCTIONS",
- {},
- clear=True,
- ):
- TrinoEngineSpec.execute_with_cursor(
- cursor=mock_cursor,
- sql="SELECT 1 FROM foo",
- query=mock_query,
- )
+ with app.test_request_context("/some/place/"):
+ g.some_value = "some_value"
+
+ with patch.object(TrinoEngineSpec, "execute", side_effect=_mock_execute):
+ with patch.dict(
+ "superset.config.DISALLOWED_SQL_FUNCTIONS",
+ {},
+ clear=True,
+ ):
+ TrinoEngineSpec.execute_with_cursor(
+ cursor=mock_cursor,
+ sql="SELECT 1 FROM foo",
+ query=mock_query,
+ )
def test_get_columns(mocker: MockerFixture):
@@ -784,3 +793,57 @@ def test_where_latest_partition(
)
== f"""SELECT * FROM table \nWHERE partition_key = {expected_value}"""
)
+
+
+@pytest.fixture
+def oauth2_config() -> OAuth2ClientConfig:
+ """
+ Config for Trino OAuth2.
+ """
+ return {
+ "id": "trino",
+ "secret": "very-secret",
+ "scope": "",
+ "redirect_uri": "http://localhost:8088/api/v1/database/oauth2/",
+ "authorization_request_uri": "https://trino.auth.server.example/realms/master/protocol/openid-connect/auth",
+ "token_request_uri": "https://trino.auth.server.example/master/protocol/openid-connect/token",
+ "request_content_type": "data",
+ }
+
+
+def test_get_oauth2_token(
+ mocker: MockerFixture,
+ oauth2_config: OAuth2ClientConfig,
+) -> None:
+ """
+ Test `get_oauth2_token`.
+ """
+ from superset.db_engine_specs.trino import TrinoEngineSpec
+
+ requests = mocker.patch("superset.db_engine_specs.base.requests")
+ requests.post().json.return_value = {
+ "access_token": "access-token",
+ "expires_in": 3600,
+ "scope": "scope",
+ "token_type": "Bearer",
+ "refresh_token": "refresh-token",
+ }
+
+ assert TrinoEngineSpec.get_oauth2_token(oauth2_config, "code") == {
+ "access_token": "access-token",
+ "expires_in": 3600,
+ "scope": "scope",
+ "token_type": "Bearer",
+ "refresh_token": "refresh-token",
+ }
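+    # request_content_type="data" means the token request is form-encoded
+    # (sent via data=) rather than JSON.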
+ requests.post.assert_called_with(
+ "https://trino.auth.server.example/master/protocol/openid-connect/token",
+ data={
+ "code": "code",
+ "client_id": "trino",
+ "client_secret": "very-secret",
+ "redirect_uri": "http://localhost:8088/api/v1/database/oauth2/",
+ "grant_type": "authorization_code",
+ },
+ timeout=30.0,
+ )
diff --git a/tests/unit_tests/models/core_test.py b/tests/unit_tests/models/core_test.py
index 3c591d4466f74..1dff4784ec9a5 100644
--- a/tests/unit_tests/models/core_test.py
+++ b/tests/unit_tests/models/core_test.py
@@ -432,6 +432,31 @@ def test_get_sqla_engine_user_impersonation(mocker: MockerFixture) -> None:
)
+def test_add_database_to_signature():
+ args = ["param1", "param2"]
+
+ def func_without_db(param1, param2):
+ pass
+
+ def func_with_db_start(database, param1, param2):
+ pass
+
+ def func_with_db_end(param1, param2, database):
+ pass
+
+ database = Database(
+ database_name="my_db",
+ sqlalchemy_uri="trino://",
+ impersonate_user=True,
+ )
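+    # add_database_to_signature should splice the Database instance into the
+    # args wherever the target function declares a "database" parameter.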
+ args1 = database.add_database_to_signature(func_without_db, args.copy())
+ assert args1 == ["param1", "param2"]
+ args2 = database.add_database_to_signature(func_with_db_start, args.copy())
+ assert args2 == [database, "param1", "param2"]
+ args3 = database.add_database_to_signature(func_with_db_end, args.copy())
+ assert args3 == ["param1", "param2", database]
+
+
@with_feature_flags(IMPERSONATE_WITH_EMAIL_PREFIX=True)
def test_get_sqla_engine_user_impersonation_email(mocker: MockerFixture) -> None:
"""
@@ -496,6 +521,7 @@ def test_get_oauth2_config(app_context: None) -> None:
"token_request_uri": "https://abcd1234.snowflakecomputing.com/oauth/token-request",
"scope": "refresh_token session:role:USERADMIN",
"redirect_uri": "http://example.com/api/v1/database/oauth2/",
+ "request_content_type": "json",
}
diff --git a/tests/unit_tests/pandas_postprocessing/test_compare.py b/tests/unit_tests/pandas_postprocessing/test_compare.py
index 9da8a31535470..a26aa11d290ea 100644
--- a/tests/unit_tests/pandas_postprocessing/test_compare.py
+++ b/tests/unit_tests/pandas_postprocessing/test_compare.py
@@ -14,6 +14,9 @@
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
+import io
+import sys
+
import pandas as pd
from superset.constants import PandasPostprocessingCompare as PPC
@@ -179,6 +182,70 @@ def test_compare_multi_index_column():
)
+def test_compare_multi_index_column_non_lex_sorted():
+ index = pd.to_datetime(["2021-01-01", "2021-01-02", "2021-01-03"])
+ index.name = "__timestamp"
+
+ iterables = [["m1", "m2"], ["a", "b"], ["x", "y"]]
+ columns = pd.MultiIndex.from_product(iterables, names=[None, "level1", "level2"])
+
+ df = pd.DataFrame(index=index, columns=columns, data=1)
+
+    # Rebuild the columns from explicit tuples with the level names reordered,
+    # so the resulting MultiIndex is not lexicographically sorted
+ new_columns_order = [
+ ("m1", "a", "x"),
+ ("m1", "a", "y"),
+ ("m1", "b", "x"),
+ ("m1", "b", "y"),
+ ("m2", "a", "x"),
+ ("m2", "a", "y"),
+ ("m2", "b", "x"),
+ ("m2", "b", "y"),
+ ]
+
+ df.columns = pd.MultiIndex.from_tuples(
+ new_columns_order, names=["level1", "level2", None]
+ )
+
+ # to capture stderr
+ stderr = sys.stderr
+ sys.stderr = io.StringIO()
+
+ try:
+ post_df = pp.compare(
+ df,
+ source_columns=["m1"],
+ compare_columns=["m2"],
+ compare_type=PPC.DIFF,
+ drop_original_columns=True,
+ )
+ assert sys.stderr.getvalue() == ""
+ finally:
+ sys.stderr = stderr
+
+ flat_df = pp.flatten(post_df)
+ """
+ __timestamp difference__m1__m2, a, x difference__m1__m2, a, y difference__m1__m2, b, x difference__m1__m2, b, y
+ 0 2021-01-01 0 0 0 0
+ 1 2021-01-02 0 0 0 0
+ 2 2021-01-03 0 0 0 0
+ """
+ assert flat_df.equals(
+ pd.DataFrame(
+ data={
+ "__timestamp": pd.to_datetime(
+ ["2021-01-01", "2021-01-02", "2021-01-03"]
+ ),
+ "difference__m1__m2, a, x": [0, 0, 0],
+ "difference__m1__m2, a, y": [0, 0, 0],
+ "difference__m1__m2, b, x": [0, 0, 0],
+ "difference__m1__m2, b, y": [0, 0, 0],
+ }
+ )
+ )
+
+
def test_compare_after_pivot():
pivot_df = pp.pivot(
df=multiple_metrics_df,