fix show table status case sensitivity (pingcap#7518)
iamzhoug37 committed Oct 26, 2018
1 parent a4cff40 commit 538dc04
Showing 21 changed files with 234 additions and 150 deletions.
19 changes: 19 additions & 0 deletions executor/show_test.go
@@ -655,6 +655,25 @@ func (s *testSuite) TestShowTableStatus(c *C) {
 	c.Assert(rows[0].GetString(16), Equals, "partitioned")
 }
 
+func (s *testSuite) TestShowTableStatusCaseInsensitive(c *C) {
+	tk := testkit.NewTestKit(c, s.store)
+
+	tk.MustExec("use test")
+	tk.MustExec(`drop table if exists t;`)
+	tk.MustExec(`create table t(a bigint);`)
+
+	// It's not easy to test the result contents because every time the test runs, "Create_time" changed.
+	rs, err := tk.Exec("SHOW TABLE STATUS like 'T';")
+	c.Assert(errors.ErrorStack(err), Equals, "")
+	c.Assert(rs, NotNil)
+	rows, err := session.GetRows4Test(context.Background(), tk.Se, rs)
+	c.Assert(errors.ErrorStack(err), Equals, "")
+	err = rs.Close()
+	c.Assert(errors.ErrorStack(err), Equals, "")
+
+	c.Assert(rows[0].GetString(0), Equals, "t")
+}
+
 func (s *testSuite) TestShowSlow(c *C) {
 	tk := testkit.NewTestKit(c, s.store)
 	// The test result is volatile, because
8 changes: 8 additions & 0 deletions expression/builtin_like.go
@@ -15,8 +15,10 @@ package expression
 
 import (
 	"regexp"
+	"strings"
 
 	"github.com/pingcap/tidb/sessionctx"
+	"github.com/pingcap/tidb/sessionctx/variable"
 	"github.com/pingcap/tidb/types"
 	"github.com/pingcap/tidb/util/chunk"
 	"github.com/pingcap/tidb/util/stringutil"
@@ -80,6 +82,12 @@ func (b *builtinLikeSig) evalInt(row chunk.Row) (int64, bool, error) {
 		return 0, isNull, errors.Trace(err)
 	}
 	escape := byte(val)
+
+	if variable.SysVars["lower_case_table_names"].Value != "0" {
+		patternStr = strings.ToLower(patternStr)
+		valStr = strings.ToLower(valStr)
+	}
+
 	patChars, patTypes := stringutil.CompilePattern(patternStr, escape)
 	match := stringutil.DoMatch(valStr, patChars, patTypes)
 	return boolToInt64(match), false, nil
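
Taken together, the change above folds both the matched value and the LIKE pattern to lower case whenever lower_case_table_names is non-zero, and only then compiles and runs the ordinary pattern match. Below is a minimal standalone sketch of that flow, reusing the stringutil helpers visible in the diff; the function name, the caseInsensitive flag, and the fixed backslash escape are illustrative, and it assumes the pingcap/tidb module from this era is importable.

package main

import (
	"fmt"
	"strings"

	"github.com/pingcap/tidb/util/stringutil"
)

// likeMatch mirrors the builtin change above: optionally fold both operands
// to lower case, then compile the pattern and run the usual LIKE match.
// Sketch only; the real builtin also handles NULL operands and reads the
// escape character from its third argument.
func likeMatch(val, pattern string, caseInsensitive bool) bool {
	const escape = '\\'
	if caseInsensitive {
		val = strings.ToLower(val)
		pattern = strings.ToLower(pattern)
	}
	patChars, patTypes := stringutil.CompilePattern(pattern, escape)
	return stringutil.DoMatch(val, patChars, patTypes)
}

func main() {
	fmt.Println(likeMatch("aA", "Aa", false)) // false: case-sensitive comparison
	fmt.Println(likeMatch("aA", "Aa", true))  // true: both sides folded to lower case
}

Run as written, this should print false then true, which matches the updated expectations in evaluator_test.go and integration_test.go.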
47 changes: 32 additions & 15 deletions expression/evaluator_test.go
@@ -24,6 +24,7 @@ import (
 	"github.com/pingcap/parser/mysql"
 	"github.com/pingcap/parser/terror"
 	"github.com/pingcap/tidb/sessionctx"
+	"github.com/pingcap/tidb/sessionctx/variable"
 	"github.com/pingcap/tidb/types"
 	"github.com/pingcap/tidb/util/chunk"
 	"github.com/pingcap/tidb/util/mock"
@@ -545,25 +546,41 @@ func (s *testEvaluatorSuite) TestExtract(c *C) {
 
 func (s *testEvaluatorSuite) TestLike(c *C) {
 	defer testleak.AfterTest(c)()
-	tests := []struct {
+
+	lowerCaseTableNamesValues := []string{"0", "2"}
+
+	caseTests := [2][]struct {
 		input string
 		pattern string
 		match int
 	}{
-		{"a", "", 0},
-		{"a", "a", 1},
-		{"a", "b", 0},
-		{"aA", "Aa", 0},
-		{"aAb", `Aa%`, 0},
-		{"aAb", "aA_", 1},
-	}
-	for _, tt := range tests {
-		fc := funcs[ast.Like]
-		f, err := fc.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(tt.input, tt.pattern, 0)))
-		c.Assert(err, IsNil)
-		r, err := evalBuiltinFunc(f, chunk.Row{})
-		c.Assert(err, IsNil)
-		c.Assert(r, testutil.DatumEquals, types.NewDatum(tt.match))
+		{
+			{"a", "", 0},
+			{"a", "a", 1},
+			{"a", "b", 0},
+			{"aA", "Aa", 0},
+			{"aAb", `Aa%`, 0},
+			{"aAb", "aA_", 1},
+		}, {
+			{"a", "", 0},
+			{"a", "a", 1},
+			{"a", "b", 0},
+			{"aA", "Aa", 1},
+			{"aAb", `Aa%`, 1},
+			{"aAb", "aA_", 1},
+		}}
+
+	for index, lowerCaseTableNamesValue := range lowerCaseTableNamesValues {
+		variable.SysVars["lower_case_table_names"].Value = lowerCaseTableNamesValue
+		tests := &caseTests[index]
+		for _, tt := range *tests {
+			fc := funcs[ast.Like]
+			f, err := fc.getFunction(s.ctx, s.datumsToConstants(types.MakeDatums(tt.input, tt.pattern, 0)))
+			c.Assert(err, IsNil)
+			r, err := evalBuiltinFunc(f, chunk.Row{})
+			c.Assert(err, IsNil)
+			c.Assert(r, testutil.DatumEquals, types.NewDatum(tt.match))
+		}
 	}
 }
 
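
One caveat about the test above: variable.SysVars is process-wide state, so the loop leaves lower_case_table_names set to "2" (the last value in the slice) for whatever runs afterwards. A hypothetical save-and-restore helper, sketched against the same sessionctx/variable package used in the diff (the helper name and the main function are illustrative, not part of the commit):

package main

import (
	"fmt"

	"github.com/pingcap/tidb/sessionctx/variable"
)

// withLowerCaseTableNames sets the global lower_case_table_names switch to
// val, runs fn, and then restores the previous value. Hypothetical helper;
// the committed test assigns to the global directly.
func withLowerCaseTableNames(val string, fn func()) {
	sv := variable.SysVars["lower_case_table_names"]
	old := sv.Value
	sv.Value = val
	defer func() { sv.Value = old }()
	fn()
}

func main() {
	withLowerCaseTableNames("0", func() {
		fmt.Println(variable.SysVars["lower_case_table_names"].Value) // prints "0"
	})
}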
2 changes: 1 addition & 1 deletion expression/integration_test.go
@@ -2302,7 +2302,7 @@ func (s *testIntegrationSuite) TestBuiltin(c *C) {
 	likeTests := []testCase{
 		{"a", "a", 1},
 		{"a", "b", 0},
-		{"aA", "Aa", 0},
+		{"aA", "Aa", 1},
 		{`aA%`, "aAab", 1},
 		{"aA_", "Aaab", 0},
 		{"Aa_", "Aab", 1},
40 changes: 20 additions & 20 deletions planner/core/exhaust_physical_plans.go
@@ -29,7 +29,7 @@ import (
 )
 
 func (p *LogicalUnionScan) exhaustPhysicalPlans(prop *property.PhysicalProperty) []PhysicalPlan {
-	us := PhysicalUnionScan{Conditions: p.conditions}.init(p.ctx, p.stats, prop)
+	us := PhysicalUnionScan{Conditions: p.conditions}.Init(p.ctx, p.stats, prop)
 	return []PhysicalPlan{us}
 }
 
@@ -132,7 +132,7 @@ func (p *LogicalJoin) getMergeJoin(prop *property.PhysicalProperty) []PhysicalPl
 			DefaultValues: p.DefaultValues,
 			LeftKeys: leftKeys,
 			RightKeys: rightKeys,
-		}.init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt))
+		}.Init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt))
 		mergeJoin.SetSchema(p.schema)
 		mergeJoin.OtherConditions = p.moveEqualToOtherConditions(offsets)
 		if reqProps, ok := mergeJoin.tryToGetChildReqProp(prop); ok {
@@ -238,7 +238,7 @@ func (p *LogicalJoin) getEnforcedMergeJoin(prop *property.PhysicalProperty) []Ph
 		LeftKeys: leftKeys,
 		RightKeys: rightKeys,
 		OtherConditions: p.OtherConditions,
-	}.init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt))
+	}.Init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt))
 	enforcedPhysicalMergeJoin.SetSchema(p.schema)
 	enforcedPhysicalMergeJoin.childrenReqProps = []*property.PhysicalProperty{lProp, rProp}
 	return []PhysicalPlan{enforcedPhysicalMergeJoin}
@@ -274,7 +274,7 @@ func (p *LogicalJoin) getHashJoin(prop *property.PhysicalProperty, innerIdx int)
 		Concurrency: uint(p.ctx.GetSessionVars().HashJoinConcurrency),
 		DefaultValues: p.DefaultValues,
 		InnerChildIdx: innerIdx,
-	}.init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), chReqProps...)
+	}.Init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), chReqProps...)
 	hashJoin.SetSchema(p.schema)
 	return hashJoin
 }
@@ -345,7 +345,7 @@ func (p *LogicalJoin) constructIndexJoin(prop *property.PhysicalProperty, innerJ
 		innerPlan: innerPlan,
 		KeyOff2IdxOff: newKeyOff,
 		Ranges: ranges,
-	}.init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), chReqProps...)
+	}.Init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), chReqProps...)
 	join.SetSchema(p.schema)
 	return []PhysicalPlan{join}
 }
@@ -442,7 +442,7 @@ func (p *LogicalJoin) constructInnerTableScan(ds *DataSource, pk *expression.Col
 		filterCondition: ds.pushedDownConds,
 		Ranges: ranges,
 		rangeDecidedBy: outerJoinKeys,
-	}.init(ds.ctx)
+	}.Init(ds.ctx)
 	ts.SetSchema(ds.schema)
 
 	var rowCount float64
@@ -472,7 +472,7 @@ func (p *LogicalJoin) constructInnerUnionScan(us *LogicalUnionScan, reader Physi
 	}
 	// Use `reader.stats` instead of `us.stats` because it should be more accurate. No need to specify
 	// childrenReqProps now since we have got reader already.
-	physicalUnionScan := PhysicalUnionScan{Conditions: us.conditions}.init(us.ctx, reader.statsInfo(), nil)
+	physicalUnionScan := PhysicalUnionScan{Conditions: us.conditions}.Init(us.ctx, reader.statsInfo(), nil)
 	physicalUnionScan.SetChildren(reader)
 	return physicalUnionScan
 }
@@ -489,7 +489,7 @@ func (p *LogicalJoin) constructInnerIndexScan(ds *DataSource, idx *model.IndexIn
 		KeepOrder: false,
 		Ranges: ranger.FullRange(),
 		rangeDecidedBy: outerJoinKeys,
-	}.init(ds.ctx)
+	}.Init(ds.ctx)
 	is.filterCondition = remainedConds
 
 	var rowCount float64
@@ -507,7 +507,7 @@ func (p *LogicalJoin) constructInnerIndexScan(ds *DataSource, idx *model.IndexIn
 	}
 	if !isCoveringIndex(is.Columns, is.Index.Columns, is.Table.PKIsHandle) {
 		// On this way, it's double read case.
-		ts := PhysicalTableScan{Columns: ds.Columns, Table: is.Table}.init(ds.ctx)
+		ts := PhysicalTableScan{Columns: ds.Columns, Table: is.Table}.Init(ds.ctx)
 		ts.SetSchema(is.dataSourceSchema)
 		cop.tablePlan = ts
 	}
@@ -705,7 +705,7 @@ func (p *LogicalProjection) exhaustPhysicalPlans(prop *property.PhysicalProperty
 	proj := PhysicalProjection{
 		Exprs: p.Exprs,
 		CalculateNoDelay: p.calculateNoDelay,
-	}.init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), newProp)
+	}.Init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), newProp)
 	proj.SetSchema(p.schema)
 	return []PhysicalPlan{proj}
 }
@@ -718,7 +718,7 @@ func (lt *LogicalTopN) getPhysTopN() []PhysicalPlan {
 			ByItems: lt.ByItems,
 			Count: lt.Count,
 			Offset: lt.Offset,
-		}.init(lt.ctx, lt.stats, resultProp)
+		}.Init(lt.ctx, lt.stats, resultProp)
 		ret = append(ret, topN)
 	}
 	return ret
@@ -735,7 +735,7 @@ func (lt *LogicalTopN) getPhysLimits() []PhysicalPlan {
 		limit := PhysicalLimit{
 			Count: lt.Count,
 			Offset: lt.Offset,
-		}.init(lt.ctx, lt.stats, resultProp)
+		}.Init(lt.ctx, lt.stats, resultProp)
 		ret = append(ret, limit)
 	}
 	return ret
@@ -767,7 +767,7 @@ func (la *LogicalApply) exhaustPhysicalPlans(prop *property.PhysicalProperty) []
 		PhysicalJoin: la.getHashJoin(prop, 1),
 		OuterSchema: la.corCols,
 		rightChOffset: la.children[0].Schema().Len(),
-	}.init(la.ctx,
+	}.Init(la.ctx,
 		la.stats.ScaleByExpectCnt(prop.ExpectedCnt),
 		&property.PhysicalProperty{ExpectedCnt: math.MaxFloat64, Cols: prop.Cols, Desc: prop.Desc},
 		&property.PhysicalProperty{ExpectedCnt: math.MaxFloat64})
@@ -855,7 +855,7 @@ func (la *LogicalAggregation) exhaustPhysicalPlans(prop *property.PhysicalProper
 func (p *LogicalSelection) exhaustPhysicalPlans(prop *property.PhysicalProperty) []PhysicalPlan {
 	sel := PhysicalSelection{
 		Conditions: p.Conditions,
-	}.init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), prop)
+	}.Init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), prop)
 	return []PhysicalPlan{sel}
 }
 
@@ -869,7 +869,7 @@ func (p *LogicalLimit) exhaustPhysicalPlans(prop *property.PhysicalProperty) []P
 		limit := PhysicalLimit{
 			Offset: p.Offset,
 			Count: p.Count,
-		}.init(p.ctx, p.stats, resultProp)
+		}.Init(p.ctx, p.stats, resultProp)
 		ret = append(ret, limit)
 	}
 	return ret
@@ -878,7 +878,7 @@ func (p *LogicalLimit) exhaustPhysicalPlans(prop *property.PhysicalProperty) []P
 func (p *LogicalLock) exhaustPhysicalPlans(prop *property.PhysicalProperty) []PhysicalPlan {
 	lock := PhysicalLock{
 		Lock: p.Lock,
-	}.init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), prop)
+	}.Init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), prop)
 	return []PhysicalPlan{lock}
 }
 
@@ -891,13 +891,13 @@ func (p *LogicalUnionAll) exhaustPhysicalPlans(prop *property.PhysicalProperty)
 	for range p.children {
 		chReqProps = append(chReqProps, &property.PhysicalProperty{ExpectedCnt: prop.ExpectedCnt})
 	}
-	ua := PhysicalUnionAll{}.init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), chReqProps...)
+	ua := PhysicalUnionAll{}.Init(p.ctx, p.stats.ScaleByExpectCnt(prop.ExpectedCnt), chReqProps...)
 	ua.SetSchema(p.Schema())
 	return []PhysicalPlan{ua}
 }
 
 func (ls *LogicalSort) getPhysicalSort(prop *property.PhysicalProperty) *PhysicalSort {
-	ps := PhysicalSort{ByItems: ls.ByItems}.init(ls.ctx, ls.stats.ScaleByExpectCnt(prop.ExpectedCnt), &property.PhysicalProperty{ExpectedCnt: math.MaxFloat64})
+	ps := PhysicalSort{ByItems: ls.ByItems}.Init(ls.ctx, ls.stats.ScaleByExpectCnt(prop.ExpectedCnt), &property.PhysicalProperty{ExpectedCnt: math.MaxFloat64})
 	return ps
 }
 
@@ -907,7 +907,7 @@ func (ls *LogicalSort) getNominalSort(reqProp *property.PhysicalProperty) *Nomin
 		return nil
 	}
 	prop.ExpectedCnt = reqProp.ExpectedCnt
-	ps := NominalSort{}.init(ls.ctx, prop)
+	ps := NominalSort{}.Init(ls.ctx, prop)
 	return ps
 }
 
@@ -928,6 +928,6 @@ func (p *LogicalMaxOneRow) exhaustPhysicalPlans(prop *property.PhysicalProperty)
 	if !prop.IsEmpty() {
 		return nil
 	}
-	mor := PhysicalMaxOneRow{}.init(p.ctx, p.stats, &property.PhysicalProperty{ExpectedCnt: 2})
+	mor := PhysicalMaxOneRow{}.Init(p.ctx, p.stats, &property.PhysicalProperty{ExpectedCnt: 2})
 	return []PhysicalPlan{mor}
 }
10 changes: 5 additions & 5 deletions planner/core/expression_rewriter.go
@@ -393,7 +393,7 @@ func (er *expressionRewriter) handleCompareSubquery(v *ast.CompareSubqueryExpr)
 // handleOtherComparableSubq handles the queries like < any, < max, etc. For example, if the query is t.id < any (select s.id from s),
 // it will be rewrote to t.id < (select max(s.id) from s).
 func (er *expressionRewriter) handleOtherComparableSubq(lexpr, rexpr expression.Expression, np LogicalPlan, useMin bool, cmpFunc string, all bool) {
-	plan4Agg := LogicalAggregation{}.init(er.ctx)
+	plan4Agg := LogicalAggregation{}.Init(er.ctx)
 	plan4Agg.SetChildren(np)
 
 	// Create a "max" or "min" aggregation.
@@ -468,7 +468,7 @@ func (er *expressionRewriter) buildQuantifierPlan(plan4Agg *LogicalAggregation,
 	joinSchema := er.p.Schema()
 	proj := LogicalProjection{
 		Exprs: expression.Column2Exprs(joinSchema.Clone().Columns[:outerSchemaLen]),
-	}.init(er.ctx)
+	}.Init(er.ctx)
 	proj.SetSchema(expression.NewSchema(joinSchema.Clone().Columns[:outerSchemaLen]...))
 	proj.Exprs = append(proj.Exprs, cond)
 	proj.schema.Append(&expression.Column{
@@ -489,7 +489,7 @@ func (er *expressionRewriter) handleNEAny(lexpr, rexpr expression.Expression, np
 	countFunc := aggregation.NewAggFuncDesc(er.ctx, ast.AggFuncCount, []expression.Expression{rexpr}, true)
 	plan4Agg := LogicalAggregation{
 		AggFuncs: []*aggregation.AggFuncDesc{firstRowFunc, countFunc},
-	}.init(er.ctx)
+	}.Init(er.ctx)
 	plan4Agg.SetChildren(np)
 	firstRowResultCol := &expression.Column{
 		ColName: model.NewCIStr("col_firstRow"),
@@ -515,7 +515,7 @@ func (er *expressionRewriter) handleEQAll(lexpr, rexpr expression.Expression, np
 	countFunc := aggregation.NewAggFuncDesc(er.ctx, ast.AggFuncCount, []expression.Expression{rexpr}, true)
 	plan4Agg := LogicalAggregation{
 		AggFuncs: []*aggregation.AggFuncDesc{firstRowFunc, countFunc},
-	}.init(er.ctx)
+	}.Init(er.ctx)
 	plan4Agg.SetChildren(np)
 	firstRowResultCol := &expression.Column{
 		ColName: model.NewCIStr("col_firstRow"),
@@ -584,7 +584,7 @@ out:
 			p = p.Children()[0]
 		case *LogicalAggregation:
 			if len(plan.GroupByItems) == 0 {
-				p = LogicalTableDual{RowCount: 1}.init(er.ctx)
+				p = LogicalTableDual{RowCount: 1}.Init(er.ctx)
 				break out
 			}
 			p = p.Children()[0]
Diffs for the remaining 15 changed files were not loaded.
