expression: add header for tidb_decode_plan result (#18440) #18501

Merged 1 commit on Jul 13, 2020
28 changes: 15 additions & 13 deletions expression/integration_test.go
@@ -4399,24 +4399,26 @@ func (s *testIntegrationSuite) TestTiDBDecodePlanFunc(c *C) {
"8xNwozCTFfMTcJMQkwCWx0HVlATlVMTCksIG5vdChpc251bGwVHAApUhcAUDIpKQo0CTEwXzE2CTEJMTAwMDAJdAHB2Dp0MSwgcmFuZ2U6Wy1p" +
"bmYsK2luZl0sIGtlZXAgb3JkZXI6ZmFsc2UsIHN0YXRzOnBzZXVkbwoFtgAyAZcEMAk6tgAEMjAFtgQyMDq2AAg5LCBmtgAAMFa3AAA5FbcAO" +
"T63AAAyzrcA')").Check(testkit.Rows("" +
"\tStreamAgg_13 \troot\t1 \tfuncs:count(1)\n" +
"\t└─HashJoin_14 \troot\t0 \tinner join, inner:TableReader_21, equal:[eq(Column#1, Column#9) eq(Column#2, Column#10)]\n" +
"\t ├─TableReader_18 \troot\t0 \tdata:Selection_17\n" +
"\t │ └─Selection_17 \tcop \t0 \tlt(Column#1, NULL), not(isnull(Column#1)), not(isnull(Column#2))\n" +
"\t │ └─TableScan_16\tcop \t10000\ttable:t1, range:[-inf,+inf], keep order:false, stats:pseudo\n" +
"\t └─TableReader_21 \troot\t0 \tdata:Selection_20\n" +
"\t └─Selection_20 \tcop \t0 \tlt(Column#9, NULL), not(isnull(Column#10)), not(isnull(Column#9))\n" +
"\t └─TableScan_19\tcop \t10000\ttable:t2, range:[-inf,+inf], keep order:false, stats:pseudo"))
"\tid \ttask\testRows\toperator info\n" +
"\tStreamAgg_13 \troot\t1 \tfuncs:count(1)\n" +
"\t└─HashJoin_14 \troot\t0 \tinner join, inner:TableReader_21, equal:[eq(Column#1, Column#9) eq(Column#2, Column#10)]\n" +
"\t ├─TableReader_18 \troot\t0 \tdata:Selection_17\n" +
"\t │ └─Selection_17 \tcop \t0 \tlt(Column#1, NULL), not(isnull(Column#1)), not(isnull(Column#2))\n" +
"\t │ └─TableScan_16\tcop \t10000 \ttable:t1, range:[-inf,+inf], keep order:false, stats:pseudo\n" +
"\t └─TableReader_21 \troot\t0 \tdata:Selection_20\n" +
"\t └─Selection_20 \tcop \t0 \tlt(Column#9, NULL), not(isnull(Column#10)), not(isnull(Column#9))\n" +
"\t └─TableScan_19\tcop \t10000 \ttable:t2, range:[-inf,+inf], keep order:false, stats:pseudo"))
tk.MustQuery("select tidb_decode_plan('rwPwcTAJNV8xNAkwCTEJZnVuY3M6bWF4KHRlc3QudC5hKS0+Q29sdW1uIzQJMQl0aW1lOj" +
"IyMy45MzXCtXMsIGxvb3BzOjIJMTI4IEJ5dGVzCU4vQQoxCTE2XzE4CTAJMQlvZmZzZXQ6MCwgY291bnQ6MQkxCQlHFDE4LjQyMjJHAAhOL0" +
"EBBCAKMgkzMl8yOAkBlEBpbmRleDpMaW1pdF8yNwkxCQ0+DDYuODUdPSwxLCBycGMgbnVtOiANDAUpGDE1MC44MjQFKjhwcm9jIGtleXM6MA" +
"kxOTgdsgAzAbIAMgFearIAFDU3LjM5NgVKAGwN+BGxIDQJMTNfMjYJMQGgHGFibGU6dCwgCbqwaWR4KGEpLCByYW5nZTooMCwraW5mXSwga2" +
"VlcCBvcmRlcjp0cnVlLCBkZXNjAT8kaW1lOjU2LjY2MR1rJDEJTi9BCU4vQQo=')").Check(testkit.Rows("" +
"\tStreamAgg_14 \troot\t1\tfuncs:max(test.t.a)->Column#4 \t1\ttime:223.935µs, loops:2 \t128 Bytes\tN/A\n" +
"\t└─Limit_18 \troot\t1\toffset:0, count:1 \t1\ttime:218.422µs, loops:2 \tN/A \tN/A\n" +
"\t └─IndexReader_28 \troot\t1\tindex:Limit_27 \t1\ttime:216.85µs, loops:1, rpc num: 1, rpc time:150.824µs, proc keys:0\t198 Bytes\tN/A\n" +
"\t └─Limit_27 \tcop \t1\toffset:0, count:1 \t1\ttime:57.396µs, loops:2 \tN/A \tN/A\n" +
"\t └─IndexScan_26\tcop \t1\ttable:t, index:idx(a), range:(0,+inf], keep order:true, desc\t1\ttime:56.661µs, loops:1 \tN/A \tN/A"))
"\tid \ttask\testRows\toperator info \tactRows\texecution info \tmemory \tdisk\n" +
"\tStreamAgg_14 \troot\t1 \tfuncs:max(test.t.a)->Column#4 \t1 \ttime:223.935µs, loops:2 \t128 Bytes\tN/A\n" +
"\t└─Limit_18 \troot\t1 \toffset:0, count:1 \t1 \ttime:218.422µs, loops:2 \tN/A \tN/A\n" +
"\t └─IndexReader_28 \troot\t1 \tindex:Limit_27 \t1 \ttime:216.85µs, loops:1, rpc num: 1, rpc time:150.824µs, proc keys:0\t198 Bytes\tN/A\n" +
"\t └─Limit_27 \tcop \t1 \toffset:0, count:1 \t1 \ttime:57.396µs, loops:2 \tN/A \tN/A\n" +
"\t └─IndexScan_26\tcop \t1 \ttable:t, index:idx(a), range:(0,+inf], keep order:true, desc\t1 \ttime:56.661µs, loops:1 \tN/A \tN/A"))
}

func (s *testIntegrationSuite) TestTiDBInternalFunc(c *C) {
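The test changes above show the user-visible effect: the plan returned by tidb_decode_plan (and by plancodec.DecodePlan) now starts with a header row naming the columns. A minimal sketch of calling the decoder directly — the encoded string below is a placeholder, not a real compressed plan, so a real value (for example the one recorded in the slow log or passed to tidb_decode_plan()) would have to be substituted:

```go
package main

import (
	"fmt"

	"github.com/pingcap/tidb/util/plancodec"
)

func main() {
	// Placeholder: substitute an actual encoded plan string here.
	encoded := "<encoded-plan>"

	plan, err := plancodec.DecodePlan(encoded)
	if err != nil {
		fmt.Println("decode failed:", err)
		return
	}
	// After this PR the first line of `plan` is a header row such as
	// "id\ttask\testRows\toperator info", extended with actRows, execution
	// info, memory and disk when the encoded plan carries runtime information.
	fmt.Println(plan)
}
```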
30 changes: 17 additions & 13 deletions infoschema/tables_test.go
@@ -920,7 +920,7 @@ func (s *testTableSuite) TestStmtSummaryTable(c *C) {
tk.MustExec("create table p(a int primary key, b int)")
for i := 1; i < 3; i++ {
tk.MustQuery("select b from p where a=1")
expectedResult := fmt.Sprintf("%d \tPoint_Get_1\troot\t1\ttable:p, handle:1 %s", i, "test.p")
expectedResult := fmt.Sprintf("%d \tid \ttask\testRows\toperator info\n\tPoint_Get_1\troot\t1 \ttable:p, handle:1 %s", i, "test.p")
// Also make sure that the plan digest is not empty
tk.MustQuery(`select exec_count, plan, table_names
from information_schema.statements_summary
@@ -952,9 +952,10 @@ func (s *testTableSuite) TestStmtSummaryTable(c *C) {
max_prewrite_regions, avg_affected_rows, query_sample_text, plan
from information_schema.statements_summary
where digest_text like 'select * from t%'`,
).Check(testkit.Rows("Select test test.t t:k 1 2 0 0 0 0 0 0 0 0 0 select * from t where a=2 \tIndexLookUp_10\troot\t100\t\n" +
"\t├─IndexScan_8 \tcop \t100\ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
"\t└─TableScan_9 \tcop \t100\ttable:t, keep order:false, stats:pseudo"))
).Check(testkit.Rows("Select test test.t t:k 1 2 0 0 0 0 0 0 0 0 0 select * from t where a=2 \tid \ttask\testRows\toperator info\n" +
"\tIndexLookUp_10\troot\t100 \t\n" +
"\t├─IndexScan_8 \tcop \t100 \ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
"\t└─TableScan_9 \tcop \t100 \ttable:t, keep order:false, stats:pseudo"))

// select ... order by
tk.MustQuery(`select stmt_type, schema_name, table_names, index_names, exec_count, sum_cop_task_num, avg_total_keys,
@@ -972,9 +973,10 @@ func (s *testTableSuite) TestStmtSummaryTable(c *C) {
max_prewrite_regions, avg_affected_rows, query_sample_text, plan
from information_schema.statements_summary
where digest_text like 'select * from t%'`,
).Check(testkit.Rows("Select test test.t t:k 2 4 0 0 0 0 0 0 0 0 0 select * from t where a=2 \tIndexLookUp_10\troot\t100\t\n" +
"\t├─IndexScan_8 \tcop \t100\ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
"\t└─TableScan_9 \tcop \t100\ttable:t, keep order:false, stats:pseudo"))
).Check(testkit.Rows("Select test test.t t:k 2 4 0 0 0 0 0 0 0 0 0 select * from t where a=2 \tid \ttask\testRows\toperator info\n" +
"\tIndexLookUp_10\troot\t100 \t\n" +
"\t├─IndexScan_8 \tcop \t100 \ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
"\t└─TableScan_9 \tcop \t100 \ttable:t, keep order:false, stats:pseudo"))

// Disable it again.
tk.MustExec("set global tidb_enable_stmt_summary = false")
@@ -1021,9 +1023,10 @@ func (s *testTableSuite) TestStmtSummaryTable(c *C) {
max_prewrite_regions, avg_affected_rows, query_sample_text, plan
from information_schema.statements_summary
where digest_text like 'select * from t%'`,
).Check(testkit.Rows("Select test test.t t:k 1 2 0 0 0 0 0 0 0 0 0 select * from t where a=2 \tIndexLookUp_10\troot\t1000\t\n" +
"\t├─IndexScan_8 \tcop \t1000\ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
"\t└─TableScan_9 \tcop \t1000\ttable:t, keep order:false, stats:pseudo"))
).Check(testkit.Rows("Select test test.t t:k 1 2 0 0 0 0 0 0 0 0 0 select * from t where a=2 \tid \ttask\testRows\toperator info\n" +
"\tIndexLookUp_10\troot\t1000 \t\n" +
"\t├─IndexScan_8 \tcop \t1000 \ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
"\t└─TableScan_9 \tcop \t1000 \ttable:t, keep order:false, stats:pseudo"))

// Disable it in global scope.
tk.MustExec("set global tidb_enable_stmt_summary = false")
@@ -1039,9 +1042,10 @@ func (s *testTableSuite) TestStmtSummaryTable(c *C) {
max_prewrite_regions, avg_affected_rows, query_sample_text, plan
from information_schema.statements_summary
where digest_text like 'select * from t%'`,
).Check(testkit.Rows("Select test test.t t:k 2 4 0 0 0 0 0 0 0 0 0 select * from t where a=2 \tIndexLookUp_10\troot\t1000\t\n" +
"\t├─IndexScan_8 \tcop \t1000\ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
"\t└─TableScan_9 \tcop \t1000\ttable:t, keep order:false, stats:pseudo"))
).Check(testkit.Rows("Select test test.t t:k 2 4 0 0 0 0 0 0 0 0 0 select * from t where a=2 \tid \ttask\testRows\toperator info\n" +
"\tIndexLookUp_10\troot\t1000 \t\n" +
"\t├─IndexScan_8 \tcop \t1000 \ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
"\t└─TableScan_9 \tcop \t1000 \ttable:t, keep order:false, stats:pseudo"))

// Unset session variable.
tk.MustExec("set session tidb_enable_stmt_summary = ''")
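Because the plan column of information_schema.statements_summary is decoded through the same DecodePlan path, its stored plans now also begin with the header row, which is why the expected strings in these tests gained an extra leading line. A small, self-contained sketch of post-processing such a value — splitPlanHeader is a hypothetical helper written only for illustration, not a TiDB API:

```go
package main

import (
	"fmt"
	"strings"
)

// splitPlanHeader separates the header row that DecodePlan now prepends
// from the operator rows that follow it. (Illustrative helper only.)
func splitPlanHeader(decoded string) (header string, operators []string) {
	lines := strings.Split(decoded, "\n")
	if len(lines) == 0 {
		return "", nil
	}
	return lines[0], lines[1:]
}

func main() {
	// Trimmed-down stand-in for a decoded plan as it now appears in
	// information_schema.statements_summary.
	decoded := "\tid            \ttask\testRows\toperator info\n" +
		"\tIndexLookUp_10\troot\t100    \t\n" +
		"\t├─IndexScan_8 \tcop \t100    \ttable:t, index:k(a), range:[2,2], keep order:false, stats:pseudo\n" +
		"\t└─TableScan_9 \tcop \t100    \ttable:t, keep order:false, stats:pseudo"

	header, operators := splitPlanHeader(decoded)
	fmt.Println("header:", strings.TrimSpace(header))
	fmt.Println("operator rows:", len(operators)) // 3
}
```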
54 changes: 41 additions & 13 deletions util/plancodec/codec.go
@@ -52,6 +52,7 @@ func DecodePlan(planString string) (string, error) {
pd := decoderPool.Get().(*planDecoder)
defer decoderPool.Put(pd)
pd.buf.Reset()
pd.addHeader = true
return pd.decode(planString)
}

@@ -63,6 +64,7 @@ func DecodeNormalizedPlan(planString string) (string, error) {
pd := decoderPool.Get().(*planDecoder)
defer decoderPool.Put(pd)
pd.buf.Reset()
pd.addHeader = false
return pd.buildPlanTree(planString)
}

@@ -71,6 +73,7 @@ type planDecoder struct {
depths []int
indents [][]rune
planInfos []*planInfo
addHeader bool
}

type planInfo struct {
@@ -95,7 +98,6 @@ func (pd *planDecoder) buildPlanTree(planString string) (string, error) {
}
pd.depths = pd.depths[:0]
pd.planInfos = pd.planInfos[:0]
planInfos := pd.planInfos
for _, node := range nodes {
p, err := decodePlanInfo(node)
if err != nil {
@@ -104,20 +106,24 @@ func (pd *planDecoder) buildPlanTree(planString string) (string, error) {
if p == nil {
continue
}
planInfos = append(planInfos, p)
pd.planInfos = append(pd.planInfos, p)
pd.depths = append(pd.depths, p.depth)
}

if pd.addHeader {
pd.addPlanHeader()
}

// Calculated indentation of plans.
pd.initPlanTreeIndents()
for i := 1; i < len(pd.depths); i++ {
parentIndex := pd.findParentIndex(i)
pd.fillIndent(parentIndex, i)
}
// Align the value of plan fields.
pd.alignFields(planInfos)
pd.alignFields()

for i, p := range planInfos {
for i, p := range pd.planInfos {
if i > 0 {
pd.buf.WriteByte(lineBreaker)
}
@@ -134,6 +140,28 @@ func (pd *planDecoder) buildPlanTree(planString string) (string, error) {
return pd.buf.String(), nil
}

func (pd *planDecoder) addPlanHeader() {
if len(pd.planInfos) == 0 {
return
}
header := &planInfo{
depth: 0,
fields: []string{"id", "task", "estRows", "operator info", "actRows", "execution info", "memory", "disk"},
}
if len(pd.planInfos[0].fields) < len(header.fields) {
// plan without runtime information.
header.fields = header.fields[:len(pd.planInfos[0].fields)]
}
planInfos := make([]*planInfo, 0, len(pd.planInfos)+1)
depths := make([]int, 0, len(pd.planInfos)+1)
planInfos = append(planInfos, header)
planInfos = append(planInfos, pd.planInfos...)
depths = append(depths, header.depth)
depths = append(depths, pd.depths...)
pd.planInfos = planInfos
pd.depths = depths
}

func (pd *planDecoder) initPlanTreeIndents() {
pd.indents = pd.indents[:0]
for i := 0; i < len(pd.depths); i++ {
@@ -173,29 +201,29 @@ func (pd *planDecoder) fillIndent(parentIndex, childIndex int) {
}
}

func (pd *planDecoder) alignFields(planInfos []*planInfo) {
if len(planInfos) == 0 {
func (pd *planDecoder) alignFields() {
if len(pd.planInfos) == 0 {
return
}
// Align fields length. Some plan may doesn't have runtime info, need append `` to align with other plan fields.
maxLen := -1
for _, p := range planInfos {
for _, p := range pd.planInfos {
if len(p.fields) > maxLen {
maxLen = len(p.fields)
}
}
for _, p := range planInfos {
for _, p := range pd.planInfos {
for len(p.fields) < maxLen {
p.fields = append(p.fields, "")
}
}

fieldsLen := len(planInfos[0].fields)
fieldsLen := len(pd.planInfos[0].fields)
// Last field no need to align.
fieldsLen--
for colIdx := 0; colIdx < fieldsLen; colIdx++ {
maxFieldLen := pd.getMaxFieldLength(colIdx, planInfos)
for rowIdx, p := range planInfos {
maxFieldLen := pd.getMaxFieldLength(colIdx)
for rowIdx, p := range pd.planInfos {
fillLen := maxFieldLen - pd.getPlanFieldLen(rowIdx, colIdx, p)
for i := 0; i < fillLen; i++ {
p.fields[colIdx] += " "
@@ -204,9 +232,9 @@ func (pd *planDecoder) alignFields(planInfos []*planInfo) {
}
}

func (pd *planDecoder) getMaxFieldLength(idx int, planInfos []*planInfo) int {
func (pd *planDecoder) getMaxFieldLength(idx int) int {
maxLength := -1
for rowIdx, p := range planInfos {
for rowIdx, p := range pd.planInfos {
l := pd.getPlanFieldLen(rowIdx, idx, p)
if l > maxLength {
maxLength = l
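In short, addPlanHeader prepends a header row with up to eight column names and trims it to the field count of the first plan row (plans decoded without runtime information carry only the first four fields), after which alignFields pads every column, header included, to its widest value. A self-contained sketch of just the truncation rule — headerFor is an illustrative stand-alone function, not part of the planDecoder API:

```go
package main

import "fmt"

// headerFor mirrors the truncation in planDecoder.addPlanHeader: when the
// first plan row has fewer fields than the full header (i.e. the plan was
// encoded without runtime information), the header is cut to match.
func headerFor(firstRowFieldCount int) []string {
	full := []string{"id", "task", "estRows", "operator info",
		"actRows", "execution info", "memory", "disk"}
	if firstRowFieldCount < len(full) {
		return full[:firstRowFieldCount]
	}
	return full
}

func main() {
	fmt.Println(headerFor(4)) // [id task estRows operator info] – no runtime info
	fmt.Println(headerFor(8)) // all eight columns – plan with execution details
}
```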