From a4d97d2a19eabbe1cc01ead2c3b2b4bdab97cb34 Mon Sep 17 00:00:00 2001
From: Huy Do
Date: Tue, 15 Oct 2024 17:46:06 -0700
Subject: [PATCH] [CH] Migrate TorchInductor benchmark page

---
 .../params.json                               |  11 +-
 .../compilers_benchmark_performance/query.sql | 324 ++++++++++--------
 .../params.json                               |   3 +-
 .../query.sql                                 |  40 ++-
 .../benchmark/compilers/ModelGraphPanel.tsx   |  27 +-
 .../benchmark/compilers/SummaryGraphPanel.tsx |  41 +--
 .../benchmark/llms/ModelGraphPanel.tsx        |   8 +-
 .../[suite]/[compiler]/[[...page]].tsx        | 114 ++----
 torchci/pages/benchmark/compilers.tsx         | 100 ++----
 torchci/pages/benchmark/llms.tsx              |   1 -
 10 files changed, 287 insertions(+), 382 deletions(-)

diff --git a/torchci/clickhouse_queries/compilers_benchmark_performance/params.json b/torchci/clickhouse_queries/compilers_benchmark_performance/params.json
index f91e82cbb1..244ba8b8de 100644
--- a/torchci/clickhouse_queries/compilers_benchmark_performance/params.json
+++ b/torchci/clickhouse_queries/compilers_benchmark_performance/params.json
@@ -1,7 +1,7 @@
 {
-  "branches": "String",
-  "commits": "String",
-  "compilers": "String",
+  "branches": "Array(String)",
+  "commits": "Array(String)",
+  "compilers": "Array(String)",
   "device": "String",
   "dtypes": "String",
   "getJobId": "Bool",
@@ -9,7 +9,6 @@
   "mode": "String",
   "startTime": "DateTime64(3)",
   "stopTime": "DateTime64(3)",
-  "suites": "String",
-  "timezone": "String",
+  "suites": "Array(String)",
   "workflowId": "Int64"
-}
\ No newline at end of file
+}
diff --git a/torchci/clickhouse_queries/compilers_benchmark_performance/query.sql b/torchci/clickhouse_queries/compilers_benchmark_performance/query.sql
index f9f2b50ec4..ec0a4ccc9b 100644
--- a/torchci/clickhouse_queries/compilers_benchmark_performance/query.sql
+++ b/torchci/clickhouse_queries/compilers_benchmark_performance/query.sql
@@ -1,152 +1,190 @@
--- !!! Query is not converted to CH syntax yet. Delete this line when it gets converted
+-- This query is used to get the PT2 benchmark results from different experiments
+-- to power the TorchInductor benchmark dashboard
 WITH performance_results AS (
-    SELECT
-        name,
-        IF(speedup = 'infra_error', NULL, speedup) AS speedup, -- Handle the recent burst of infra error
-        REPLACE(
-            filename,
-            CONCAT(
-                '_', : dtypes, '_', : mode, '_', : device,
-                '_performance'
-            )
-        ) AS filename,
-        compilation_latency,
-        compression_ratio,
-        abs_latency,
-        mfu,
-        memory_bandwidth,
-        dynamo_peak_mem,
-        eager_peak_mem,
-        workflow_id,
-        CAST(job_id AS INT) AS job_id,
-    FROM
-        inductor.torch_dynamo_perf_stats_v2
-    WHERE
-        filename LIKE CONCAT(
-            '%_', : dtypes, '_', : mode, '_', : device,
-            '_performance%'
-        )
-        AND TIMESTAMP_MILLIS(timestamp) >= PARSE_DATETIME_ISO8601(:startTime)
-        AND TIMESTAMP_MILLIS(timestamp) < PARSE_DATETIME_ISO8601(:stopTime)
-        AND (workflow_id = :workflowId OR :workflowId = 0)
+    SELECT
+        name,
+        IF(speedup = 'infra_error', '', speedup) AS speedup,
+        -- Handle the recent burst of infra error
+        REPLACE(
+            filename,
+            CONCAT(
+                '_',
+                {dtypes: String},
+                '_',
+                {mode: String},
+                '_',
+                {device: String},
+                '_performance'
+            ),
+            ''
+        ) AS replaced_filename,
+        compilation_latency,
+        compression_ratio,
+        abs_latency,
+        dynamo_peak_mem,
+        eager_peak_mem,
+        workflow_id,
+        toInt64(job_id) AS job_id,
+        timestamp
+    FROM
+        benchmark.inductor_torch_dynamo_perf_stats
+    WHERE
+        filename LIKE CONCAT(
+            '%_',
+            {dtypes: String},
+            '_',
+            {mode: String},
+            '_',
+            {device: String},
+            '_performance%'
+        )
+        AND timestamp >= toUnixTimestamp64Milli({startTime: DateTime64(3)})
+        AND timestamp < toUnixTimestamp64Milli({stopTime: DateTime64(3)})
+        AND (
+            workflow_id = {workflowId: Int64}
+            OR {workflowId: Int64} = 0
+        )
 ),
 accuracy_results AS (
-    SELECT
-        name,
-        accuracy,
-        REPLACE(
-            filename,
-            CONCAT(
-                '_', : dtypes, '_', : mode, '_', : device,
-                '_accuracy'
-            )
-        ) AS filename,
-        workflow_id,
-        CAST(job_id AS INT) AS job_id,
-    FROM
-        inductor.torch_dynamo_perf_stats_v2
-    WHERE
-        filename LIKE CONCAT(
-            '%_', : dtypes, '_', : mode, '_', : device,
-            '_accuracy%'
-        )
-        AND TIMESTAMP_MILLIS(timestamp) >= PARSE_DATETIME_ISO8601(:startTime)
-        AND TIMESTAMP_MILLIS(timestamp) < PARSE_DATETIME_ISO8601(:stopTime)
-        AND (workflow_id = :workflowId OR :workflowId = 0)
-        AND accuracy != 'model_fail_to_load'
-        AND accuracy != 'eager_fail_to_run'
+    SELECT
+        name,
+        accuracy,
+        REPLACE(
+            filename,
+            CONCAT(
+                '_',
+                {dtypes: String},
+                '_',
+                {mode: String},
+                '_',
+                {device: String},
+                '_accuracy'
+            ),
+            ''
+        ) AS replaced_filename,
+        workflow_id,
+        toInt64(job_id) AS job_id,
+        timestamp
+    FROM
+        benchmark.inductor_torch_dynamo_perf_stats
+    WHERE
+        filename LIKE CONCAT(
+            '%_',
+            {dtypes: String},
+            '_',
+            {mode: String},
+            '_',
+            {device: String},
+            '_accuracy%'
+        )
+        AND timestamp >= toUnixTimestamp64Milli({startTime: DateTime64(3)})
+        AND timestamp < toUnixTimestamp64Milli({stopTime: DateTime64(3)})
+        AND (
+            workflow_id = {workflowId: Int64}
+            OR {workflowId: Int64} = 0
+        )
+        AND accuracy != 'model_fail_to_load'
+        AND accuracy != 'eager_fail_to_run'
 ),
 results AS (
-    SELECT
-        accuracy_results.workflow_id AS workflow_id,
-        accuracy_results.job_id AS job_id,
-        CASE
-            WHEN accuracy_results.filename LIKE '%_torchbench' THEN 'torchbench'
-            WHEN accuracy_results.filename LIKE '%_timm_models' THEN 'timm_models'
-            WHEN accuracy_results.filename LIKE '%_huggingface' THEN 'huggingface'
-            ELSE NULL
-        END AS suite,
-        CASE
-            WHEN accuracy_results.filename LIKE '%_torchbench' THEN REPLACE(
-                accuracy_results.filename, '_torchbench'
-            )
-            WHEN accuracy_results.filename LIKE '%_timm_models' THEN REPLACE(
-                accuracy_results.filename, '_timm_models'
-            )
-            WHEN accuracy_results.filename LIKE '%_huggingface' THEN REPLACE(
-                accuracy_results.filename, '_huggingface'
-            )
-            ELSE NULL
-        END AS compiler,
-        accuracy_results.name,
-        IF(TRY_CAST(speedup AS FLOAT) IS NOT NULL,
-            CAST(speedup AS FLOAT),
-            0.0
-        ) AS speedup,
-        accuracy,
-        IF(TRY_CAST(compilation_latency AS FLOAT) IS NOT NULL,
-            CAST(compilation_latency AS FLOAT),
-            0.0
-        ) AS compilation_latency,
-        IF(TRY_CAST(compression_ratio AS FLOAT) IS NOT NULL,
-            CAST(compression_ratio AS FLOAT),
-            0.0
-        ) AS compression_ratio,
-        IF(TRY_CAST(abs_latency AS FLOAT) IS NOT NULL,
-            CAST(abs_latency AS FLOAT),
-            0.0
-        ) AS abs_latency,
-        IF(TRY_CAST(mfu AS FLOAT) IS NOT NULL,
-            CAST(mfu AS FLOAT),
-            0.0
-        ) AS mfu,
-        IF(TRY_CAST(memory_bandwidth AS FLOAT) IS NOT NULL,
-            CAST(memory_bandwidth AS FLOAT),
-            0.0
-        ) AS memory_bandwidth,
-        IF(TRY_CAST(dynamo_peak_mem AS FLOAT) IS NOT NULL,
-            CAST(dynamo_peak_mem AS FLOAT),
-            0.0
-        ) AS dynamo_peak_mem,
-        IF(TRY_CAST(eager_peak_mem AS FLOAT) IS NOT NULL,
-            CAST(eager_peak_mem AS FLOAT),
-            0.0
-        ) AS eager_peak_mem,
-    FROM
-        accuracy_results
-        LEFT JOIN performance_results ON performance_results.name = accuracy_results.name
-        AND performance_results.filename = accuracy_results.filename
-        AND performance_results.workflow_id = accuracy_results.workflow_id
+    SELECT
+        accuracy_results.workflow_id AS workflow_id,
+        accuracy_results.job_id AS job_id,
+        CASE
+            WHEN accuracy_results.replaced_filename LIKE '%_torchbench' THEN 'torchbench'
+            WHEN accuracy_results.replaced_filename LIKE '%_timm_models' THEN 'timm_models'
+            WHEN accuracy_results.replaced_filename LIKE '%_huggingface' THEN 'huggingface'
+            ELSE NULL
+        END AS suite,
+        CASE
+            WHEN accuracy_results.replaced_filename LIKE '%_torchbench' THEN REPLACE(
+                accuracy_results.replaced_filename,
+                '_torchbench',
+                ''
+            )
+            WHEN accuracy_results.replaced_filename LIKE '%_timm_models' THEN REPLACE(
+                accuracy_results.replaced_filename,
+                '_timm_models',
+                ''
+            )
+            WHEN accuracy_results.replaced_filename LIKE '%_huggingface' THEN REPLACE(
+                accuracy_results.replaced_filename,
+                '_huggingface',
+                ''
+            )
+            ELSE NULL
+        END AS compiler,
+        accuracy_results.name,
+        IF(speedup != '', toFloat32(speedup), 0.0) AS speedup,
+        accuracy,
+        IF(
+            compilation_latency != '',
+            toFloat32(compilation_latency),
+            0.0
+        ) AS compilation_latency,
+        IF(
+            compression_ratio != '',
+            toFloat32(compression_ratio),
+            0.0
+        ) AS compression_ratio,
+        IF(abs_latency != '', toFloat32(abs_latency), 0.0) AS abs_latency,
+        IF(
+            dynamo_peak_mem != '',
+            toFloat32(dynamo_peak_mem),
+            0.0
+        ) AS dynamo_peak_mem,
+        IF(eager_peak_mem != '', toFloat32(eager_peak_mem), 0.0) AS eager_peak_mem,
+        IF(
+            performance_results.timestamp != 0,
+            performance_results.timestamp,
+            accuracy_results.timestamp
+        ) AS timestamp
+    FROM
+        accuracy_results
+        LEFT JOIN performance_results ON performance_results.name = accuracy_results.name
+        AND performance_results.replaced_filename = accuracy_results.replaced_filename
+        AND performance_results.workflow_id = accuracy_results.workflow_id
 )
-SELECT DISTINCT
-    results.workflow_id,
-    -- As the JSON response is pretty big, only return the field if it's needed
-    IF(:getJobId, results.job_id, NULL) AS job_id,
-    results.suite,
-    results.compiler,
-    results.name,
-    results.speedup,
-    results.accuracy,
-    results.compilation_latency,
-    results.compression_ratio,
-    results.abs_latency,
-    results.mfu,
-    results.memory_bandwidth,
-    results.dynamo_peak_mem,
-    results.eager_peak_mem,
-    FORMAT_ISO8601(
-        DATE_TRUNC(: granularity, w._event_time)
-    ) AS granularity_bucket,
+SELECT
+    DISTINCT results.workflow_id,
+    IF({getJobId: Bool}, results.job_id, 0) AS job_id,
+    results.suite,
+    results.compiler,
+    results.name,
+    results.speedup,
+    results.accuracy,
+    results.compilation_latency,
+    results.compression_ratio,
+    results.abs_latency,
+    results.dynamo_peak_mem,
+    results.eager_peak_mem,
+    DATE_TRUNC(
+        {granularity: String},
+        fromUnixTimestamp64Milli(results.timestamp)
+    ) AS granularity_bucket
 FROM
-    results LEFT JOIN commons.workflow_run w ON results.workflow_id = w.id
+    results
+    LEFT JOIN default.workflow_run w FINAL ON results.workflow_id = w.id
 WHERE
-    ARRAY_CONTAINS(SPLIT(:suites, ','), LOWER(results.suite))
-    AND (ARRAY_CONTAINS(SPLIT(:compilers, ','), LOWER(results.compiler)) OR :compilers = '')
-    AND (ARRAY_CONTAINS(SPLIT(:branches, ','), head_branch) OR :branches = '')
-    AND (ARRAY_CONTAINS(SPLIT(:commits, ','), head_sha) OR :commits = '')
+    has({suites: Array(String)}, lower(results.suite))
+    AND (
+        has(
+            {compilers: Array(String)},
+            lower(results.compiler)
+        )
+        OR empty({compilers: Array(String)})
+    )
+    AND (
+        has({branches: Array(String)}, head_branch)
+        OR empty({branches: Array(String)})
+    )
+    AND (
+        has({commits: Array(String)}, head_sha)
+        OR empty({commits: Array(String)})
+    )
 ORDER BY
-    granularity_bucket DESC,
-    workflow_id DESC,
-    suite ASC,
-    compiler ASC,
-    name ASC
\ No newline at end of file
+    granularity_bucket DESC,
+    workflow_id DESC,
+    suite ASC,
+    compiler ASC,
+    name ASC
diff --git a/torchci/clickhouse_queries/compilers_benchmark_performance_branches/params.json b/torchci/clickhouse_queries/compilers_benchmark_performance_branches/params.json
index 3f7013e3ee..7966ebe7b0 100644
--- a/torchci/clickhouse_queries/compilers_benchmark_performance_branches/params.json
+++ b/torchci/clickhouse_queries/compilers_benchmark_performance_branches/params.json
@@ -1,8 +1,7 @@
 {
   "device": "String",
   "dtypes": "String",
-  "granularity": "String",
   "mode": "String",
   "startTime": "DateTime64(3)",
   "stopTime": "DateTime64(3)"
-}
\ No newline at end of file
+}
diff --git a/torchci/clickhouse_queries/compilers_benchmark_performance_branches/query.sql b/torchci/clickhouse_queries/compilers_benchmark_performance_branches/query.sql
index 44292fe3d7..7029538d56 100644
--- a/torchci/clickhouse_queries/compilers_benchmark_performance_branches/query.sql
+++ b/torchci/clickhouse_queries/compilers_benchmark_performance_branches/query.sql
@@ -1,23 +1,25 @@
--- !!! Query is not converted to CH syntax yet. Delete this line when it gets converted
+-- This query is used to get the list of branches and commits used by different
+-- OSS CI benchmark experiments. This powers the HUD TorchInductor benchmark dashboards
 SELECT
-    DISTINCT w.head_branch,
-    w.head_sha,
-    w.id,
-    FORMAT_ISO8601(
-        DATE_TRUNC(
-            : granularity, TIMESTAMP_MILLIS(p.timestamp)
-        )
-    ) AS event_time,
+    DISTINCT w.head_branch AS head_branch,
+    w.head_sha,
+    w.id,
+    toStartOfDay(fromUnixTimestamp64Milli(p.timestamp)) AS event_time
 FROM
-    inductor.torch_dynamo_perf_stats_v2 AS p
-    LEFT JOIN commons.workflow_run w ON p.workflow_id = w.id
+    benchmark.inductor_torch_dynamo_perf_stats p
+    LEFT JOIN default.workflow_run w ON p.workflow_id = w.id
 WHERE
-    TIMESTAMP_MILLIS(p.timestamp) >= PARSE_DATETIME_ISO8601(: startTime)
-    AND TIMESTAMP_MILLIS(p.timestamp) < PARSE_DATETIME_ISO8601(: stopTime)
-    AND p.filename LIKE CONCAT(
-        '%_', : dtypes, '_', : mode, '_', : device,
-        '_performance%'
-    )
+    p.timestamp >= toUnixTimestamp64Milli({startTime: DateTime64(3)})
+    AND p.timestamp < toUnixTimestamp64Milli({stopTime: DateTime64(3)})
+    AND p.filename LIKE CONCAT(
+        '%_',
+        {dtypes: String},
+        '_',
+        {mode: String},
+        '_',
+        {device: String},
+        '_performance%'
+    )
 ORDER BY
-    w.head_branch,
-    event_time DESC
+    w.head_branch,
+    event_time DESC
diff --git a/torchci/components/benchmark/compilers/ModelGraphPanel.tsx b/torchci/components/benchmark/compilers/ModelGraphPanel.tsx
index 90d0d8e68f..a22af2e7e4 100644
--- a/torchci/components/benchmark/compilers/ModelGraphPanel.tsx
+++ b/torchci/components/benchmark/compilers/ModelGraphPanel.tsx
@@ -12,7 +12,6 @@ import {
 import dayjs from "dayjs";
 import { augmentData } from "lib/benchmark/compilerUtils";
 import { fetcher } from "lib/GeneralUtils";
-import { RocksetParam } from "lib/rockset";
 import { CompilerPerformanceData } from "lib/types";
 import useSWR from "swr";
 
@@ -31,7 +30,7 @@ export function GraphPanel({
   rCommit,
 }: {
   queryName: string;
-  queryParams: RocksetParam[];
+  queryParams: { [key: string]: any };
   granularity: Granularity;
   compiler: string;
   model: string;
@@ -39,17 +38,11 @@ export function GraphPanel({
   lCommit: string;
   rCommit: string;
 }) {
-  const queryCollection = "inductor";
-
-  const queryParamsWithBranch: RocksetParam[] = [
-    {
-      name: "branches",
-      type: "string",
-      value: branch,
-    },
+  const queryParamsWithBranch: { [key: string]: any } = {
     ...queryParams,
-  ];
-  const url = `/api/query/${queryCollection}/${queryName}?parameters=${encodeURIComponent(
+    branches: [branch],
+  };
+  const url = `/api/clickhouse/${queryName}?parameters=${encodeURIComponent(
     JSON.stringify(queryParamsWithBranch)
   )}`;
 
@@ -67,13 +60,9 @@ export function GraphPanel({
   }
 
   // Clamp to the nearest granularity (e.g. nearest hour) so that the times will
-  // align with the data we get from Rockset
-  const startTime = dayjs(
-    queryParams.find((p) => p.name === "startTime")?.value
-  ).startOf(granularity);
-  const stopTime = dayjs(
-    queryParams.find((p) => p.name === "stopTime")?.value
-  ).startOf(granularity);
+  // align with the data we get from the database
+  const startTime = dayjs(queryParams["startTime"]).startOf(granularity);
+  const stopTime = dayjs(queryParams["stopTime"]).startOf(granularity);
 
   // Only show records between these twos
   const lWorkflowId = COMMIT_TO_WORKFLOW_ID[lCommit];
diff --git a/torchci/components/benchmark/compilers/SummaryGraphPanel.tsx b/torchci/components/benchmark/compilers/SummaryGraphPanel.tsx
index 5ba7409b73..d6208600bf 100644
--- a/torchci/components/benchmark/compilers/SummaryGraphPanel.tsx
+++ b/torchci/components/benchmark/compilers/SummaryGraphPanel.tsx
@@ -17,7 +17,6 @@ import {
   getPassingModels,
 } from "lib/benchmark/compilerUtils";
 import { fetcher } from "lib/GeneralUtils";
-import { RocksetParam } from "lib/rockset";
 import useSWR from "swr";
 
 const GRAPH_ROW_HEIGHT = 245;
@@ -32,7 +31,7 @@ export function GraphPanel({
   rCommit,
 }: {
   queryName: string;
-  queryParams: RocksetParam[];
+  queryParams: { [key: string]: any };
   granularity: Granularity;
   suite: string;
   branch: string;
@@ -40,7 +39,7 @@ export function GraphPanel({
   rCommit: string;
 }) {
   // NB: I need to do multiple queries here for different suites to keep the response
-  // from Rockset small enough (<6MB) to fit into Vercel lambda limit
+  // from the database small enough (<6MB) to fit into Vercel lambda limit
   return (
     p.name === "startTime")?.value
-  ).startOf(granularity);
-  const stopTime = dayjs(
-    queryParams.find((p) => p.name === "stopTime")?.value
-  ).startOf(granularity);
+  // align with the data we get from the database
+  const startTime = dayjs(queryParams["startTime"]).startOf(granularity);
+  const stopTime = dayjs(queryParams["stopTime"]).startOf(granularity);
 
   // Compute the metrics for all passing models
   const models = getPassingModels(data);
diff --git a/torchci/components/benchmark/llms/ModelGraphPanel.tsx b/torchci/components/benchmark/llms/ModelGraphPanel.tsx
index 03ee38c75c..3119909328 100644
--- a/torchci/components/benchmark/llms/ModelGraphPanel.tsx
+++ b/torchci/components/benchmark/llms/ModelGraphPanel.tsx
@@ -71,12 +71,8 @@ export function GraphPanel({
   // Clamp to the nearest granularity (e.g. nearest hour) so that the times will
   // align with the data we get from the database
-  const startTime = dayjs(
-    (queryParams as { [key: string]: any })["startTime"]
-  ).startOf(granularity);
-  const stopTime = dayjs(
-    (queryParams as { [key: string]: any })["stopTime"]
-  ).startOf(granularity);
+  const startTime = dayjs(queryParams["startTime"]).startOf(granularity);
+  const stopTime = dayjs(queryParams["stopTime"]).startOf(granularity);
 
   // Only show records between these twos
   const lWorkflowId = COMMIT_TO_WORKFLOW_ID[lBranchAndCommit.commit];
diff --git a/torchci/pages/benchmark/[suite]/[compiler]/[[...page]].tsx b/torchci/pages/benchmark/[suite]/[compiler]/[[...page]].tsx
index c8ef5b8613..8ea01597d6 100644
--- a/torchci/pages/benchmark/[suite]/[compiler]/[[...page]].tsx
+++ b/torchci/pages/benchmark/[suite]/[compiler]/[[...page]].tsx
@@ -30,7 +30,6 @@ import { Granularity } from "components/metrics/panels/TimeSeriesPanel";
 import dayjs from "dayjs";
 import { augmentData } from "lib/benchmark/compilerUtils";
 import { fetcher } from "lib/GeneralUtils";
-import { RocksetParam } from "lib/rockset";
 import { BranchAndCommit, CompilerPerformanceData } from "lib/types";
 import { useRouter } from "next/router";
 import { useEffect, useState } from "react";
@@ -55,7 +54,7 @@ function Report({
 }: {
   dashboard: string;
   queryName: string;
-  queryParams: RocksetParam[];
+  queryParams: { [key: string]: any };
   startTime: dayjs.Dayjs;
   stopTime: dayjs.Dayjs;
   granularity: Granularity;
@@ -68,27 +67,13 @@ function Report({
   lBranchAndCommit: BranchAndCommit;
   rBranchAndCommit: BranchAndCommit;
 }) {
-  const queryCollection = "inductor";
-
-  const queryParamsWithL: RocksetParam[] = [
-    {
-      name: "branches",
-      type: "string",
-      value: lBranchAndCommit.branch,
-    },
-    {
-      name: "commits",
-      type: "string",
-      value: lBranchAndCommit.commit,
-    },
-    {
-      name: "getJobId",
-      type: "bool",
-      value: true,
-    },
+  const queryParamsWithL: { [key: string]: any } = {
     ...queryParams,
-  ];
-  const lUrl = `/api/query/${queryCollection}/${queryName}?parameters=${encodeURIComponent(
+    branches: [lBranchAndCommit.branch],
+    commits: [lBranchAndCommit.commit],
+    getJobId: true,
+  };
+  const lUrl = `/api/clickhouse/${queryName}?parameters=${encodeURIComponent(
     JSON.stringify(queryParamsWithL)
   )}`;
 
@@ -100,25 +85,13 @@ function Report({
     ? lData.filter((e: CompilerPerformanceData) => e.suite === suite)
     : lData;
 
-  const queryParamsWithR: RocksetParam[] = [
-    {
-      name: "branches",
-      type: "string",
-      value: rBranchAndCommit.branch,
-    },
-    {
-      name: "commits",
-      type: "string",
-      value: rBranchAndCommit.commit,
-    },
-    {
-      name: "getJobId",
-      type: "bool",
-      value: true,
-    },
+  const queryParamsWithR: { [key: string]: any } = {
     ...queryParams,
-  ];
-  const rUrl = `/api/query/${queryCollection}/${queryName}?parameters=${encodeURIComponent(
+    branches: [rBranchAndCommit.branch],
+    commits: [rBranchAndCommit.commit],
+    getJobId: true,
+  };
+  const rUrl = `/api/clickhouse/${queryName}?parameters=${encodeURIComponent(
    JSON.stringify(queryParamsWithR)
   )}`;
 
@@ -193,7 +166,7 @@ function Report({
 export default function Page() {
   const router = useRouter();
 
-  // The dimensions to query Rockset
+  // The dimensions to query
   const suite: string = (router.query.suite as string) ?? undefined;
   const compiler: string = (router.query.compiler as string) ?? undefined;
   const model: string = (router.query.model as string) ?? undefined;
@@ -291,48 +264,19 @@ export default function Page() {
     return ;
   }
 
-  const queryParams: RocksetParam[] = [
-    {
-      name: "timezone",
-      type: "string",
-      value: Intl.DateTimeFormat().resolvedOptions().timeZone,
-    },
-    {
-      name: "startTime",
-      type: "string",
-      value: startTime,
-    },
-    {
-      name: "stopTime",
-      type: "string",
-      value: stopTime,
-    },
-    {
-      name: "granularity",
-      type: "string",
-      value: granularity,
-    },
-    {
-      name: "mode",
-      type: "string",
-      value: mode,
-    },
-    {
-      name: "compilers",
-      type: "string",
-      value: compiler,
-    },
-    {
-      name: "dtypes",
-      type: "string",
-      value: dtype,
-    },
-    {
-      name: "device",
-      type: "string",
-      value: DISPLAY_NAMES_TO_DEVICE_NAMES[deviceName],
-    },
-  ];
+  const queryParams: { [key: string]: any } = {
+    commits: [],
+    compilers: [compiler],
+    device: DISPLAY_NAMES_TO_DEVICE_NAMES[deviceName],
+    dtypes: dtype,
+    getJobId: false,
+    granularity: granularity,
+    mode: mode,
+    startTime: dayjs(startTime).utc().format("YYYY-MM-DDTHH:mm:ss.SSS"),
+    stopTime: dayjs(stopTime).utc().format("YYYY-MM-DDTHH:mm:ss.SSS"),
+    suites: [suite],
+    workflowId: 0,
+  };
 
   return (
@@ -392,7 +336,7 @@ export default function Page() {
             titlePrefix={"Base"}
             fallbackIndex={-1} // Default to the next to latest in the window
             timeRange={timeRange}
-            useClickHouse={false}
+            useClickHouse={true}
           />
           —Diff→
@@ -408,7 +352,7 @@ export default function Page() {
             titlePrefix={"New"}
             fallbackIndex={0} // Default to the latest commit
             timeRange={timeRange}
-            useClickHouse={false}
+            useClickHouse={true}
           />
 
diff --git a/torchci/pages/benchmark/compilers.tsx b/torchci/pages/benchmark/compilers.tsx
index d2505ffca7..6f57cd54f5 100644
--- a/torchci/pages/benchmark/compilers.tsx
+++ b/torchci/pages/benchmark/compilers.tsx
@@ -31,7 +31,6 @@ import { Granularity } from "components/metrics/panels/TimeSeriesPanel";
 import dayjs from "dayjs";
 import { augmentData } from "lib/benchmark/compilerUtils";
 import { fetcher } from "lib/GeneralUtils";
-import { RocksetParam } from "lib/rockset";
 import { BranchAndCommit } from "lib/types";
 import { useRouter } from "next/router";
 import { useEffect, useState } from "react";
@@ -50,7 +49,7 @@ function Report({
   lBranchAndCommit,
   rBranchAndCommit,
 }: {
-  queryParams: RocksetParam[];
+  queryParams: { [key: string]: any };
   startTime: dayjs.Dayjs;
   stopTime: dayjs.Dayjs;
   granularity: Granularity;
@@ -61,28 +60,13 @@ function Report({
   lBranchAndCommit: BranchAndCommit;
   rBranchAndCommit: BranchAndCommit;
 }) {
-  const queryCollection = "inductor";
   const queryName = "compilers_benchmark_performance";
-
-  const queryParamsWithL: RocksetParam[] = [
-    {
-      name: "suites",
-      type: "string",
-      value: Object.keys(SUITES).join(","),
-    },
-    {
-      name: "branches",
-      type: "string",
-      value: lBranchAndCommit.branch,
-    },
-    {
-      name: "commits",
-      type: "string",
-      value: lBranchAndCommit.commit,
-    },
+  const queryParamsWithL: { [key: string]: any } = {
     ...queryParams,
-  ];
-  const lUrl = `/api/query/${queryCollection}/${queryName}?parameters=${encodeURIComponent(
+    branches: [lBranchAndCommit.branch],
+    commits: lBranchAndCommit.commit ? [lBranchAndCommit.commit] : [],
+  };
+  const lUrl = `/api/clickhouse/${queryName}?parameters=${encodeURIComponent(
     JSON.stringify(queryParamsWithL)
   )}`;
 
@@ -91,20 +75,12 @@ function Report({
   });
   lData = augmentData(lData);
 
-  const queryParamsWithR: RocksetParam[] = [
-    {
-      name: "branches",
-      type: "string",
-      value: rBranchAndCommit.branch,
-    },
-    {
-      name: "commits",
-      type: "string",
-      value: rBranchAndCommit.commit,
-    },
+  const queryParamsWithR: { [key: string]: any } = {
     ...queryParams,
+    branches: [rBranchAndCommit.branch],
+    commits: rBranchAndCommit.commit ? [rBranchAndCommit.commit] : [],
+  };
+  const rUrl = `/api/clickhouse/${queryName}?parameters=${encodeURIComponent(
     JSON.stringify(queryParamsWithR)
   )}`;
 
@@ -268,43 +244,19 @@ export default function Page() {
     );
   }, [router.query]);
 
-  const queryParams: RocksetParam[] = [
-    {
-      name: "timezone",
-      type: "string",
-      value: Intl.DateTimeFormat().resolvedOptions().timeZone,
-    },
-    {
-      name: "startTime",
-      type: "string",
-      value: startTime,
-    },
-    {
-      name: "stopTime",
-      type: "string",
-      value: stopTime,
-    },
-    {
-      name: "granularity",
-      type: "string",
-      value: granularity,
-    },
-    {
-      name: "mode",
-      type: "string",
-      value: mode,
-    },
-    {
-      name: "dtypes",
-      type: "string",
-      value: dtype,
-    },
-    {
-      name: "device",
-      type: "string",
-      value: DISPLAY_NAMES_TO_DEVICE_NAMES[deviceName],
-    },
-  ];
+  const queryParams: { [key: string]: any } = {
+    commits: [],
+    compilers: [],
+    device: DISPLAY_NAMES_TO_DEVICE_NAMES[deviceName],
+    dtypes: dtype,
+    getJobId: false,
+    granularity: granularity,
+    mode: mode,
+    startTime: dayjs(startTime).utc().format("YYYY-MM-DDTHH:mm:ss.SSS"),
+    stopTime: dayjs(stopTime).utc().format("YYYY-MM-DDTHH:mm:ss.SSS"),
+    suites: Object.keys(SUITES),
+    workflowId: 0,
+  };
 
   return (
@@ -361,7 +313,7 @@ export default function Page() {
             titlePrefix={"Base"}
             fallbackIndex={-1} // Default to the next to latest in the window
             timeRange={timeRange}
-            useClickHouse={false}
+            useClickHouse={true}
           />
           —Diff→
@@ -377,7 +329,7 @@ export default function Page() {
             titlePrefix={"New"}
             fallbackIndex={0} // Default to the latest commit
             timeRange={timeRange}
-            useClickHouse={false}
+            useClickHouse={true}
           />
 
diff --git a/torchci/pages/benchmark/llms.tsx b/torchci/pages/benchmark/llms.tsx
index 6bdcfc0ac0..7dab5724d1 100644
--- a/torchci/pages/benchmark/llms.tsx
+++ b/torchci/pages/benchmark/llms.tsx
@@ -230,7 +230,6 @@ export default function Page() {
     );
   }, [router.query]);
 
-  const queryCollection = "benchmarks";
   const queryName = "oss_ci_benchmark_names";
   const queryParams = {
     deviceArch: deviceName === DEFAULT_DEVICE_NAME ? "" : deviceName,
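
Note on the calling convention this patch converges on: every migrated page builds one flat parameter object whose array-typed fields (branches, commits, compilers, suites) line up with params.json, JSON-encodes it, and sends it to the /api/clickhouse/<queryName> route, where each field binds to a {name: Type} placeholder in query.sql. The TypeScript sketch below illustrates that pattern end to end; the route shape, parameter names, and timestamp format come from the diff above, while the literal values (branch, device, dtype, suite) are made-up examples, not values taken from the dashboard.

// Minimal sketch of the new ClickHouse calling convention, under the
// assumptions stated above.
import dayjs from "dayjs";
import utc from "dayjs/plugin/utc";

dayjs.extend(utc);

const queryParams: { [key: string]: any } = {
  branches: ["main"], // Array(String) now, instead of a comma-separated String
  commits: [],
  compilers: ["inductor"],
  device: "cuda", // illustrative device name
  dtypes: "amp",
  getJobId: false,
  granularity: "hour",
  mode: "training",
  // DateTime64(3) parameters are sent as UTC timestamps in this exact format
  startTime: dayjs().subtract(7, "day").utc().format("YYYY-MM-DDTHH:mm:ss.SSS"),
  stopTime: dayjs().utc().format("YYYY-MM-DDTHH:mm:ss.SSS"),
  suites: ["torchbench"],
  workflowId: 0,
};

// One ClickHouse-backed endpoint per named query replaces the old
// /api/query/<collection>/<name> Rockset route.
const url = `/api/clickhouse/compilers_benchmark_performance?parameters=${encodeURIComponent(
  JSON.stringify(queryParams)
)}`;

fetch(url)
  .then((res) => res.json())
  .then((data) => console.log(data));

Passing empty arrays works because the converted query guards every filter with OR empty({...: Array(String)}), so an empty list means "no filter" on that dimension, mirroring the old empty-string checks against the comma-separated Rockset parameters.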