
Commit

Merge branch 'master' into fix-stuck
YuJuncen authored Oct 9, 2021
2 parents 9aad9ec + 07eb99d commit 53fa6ac
Showing 88 changed files with 3,776 additions and 2,506 deletions.
2 changes: 1 addition & 1 deletion README.md
@@ -62,7 +62,7 @@ For support, please contact [PingCAP](http://bit.ly/contact_us_via_github).

### To start using TiDB

See [Quick Start Guide](https://pingcap.com/docs/stable/quick-start-with-tidb/).
See [Quick Start Guide](https://docs.pingcap.com/tidb/stable/quick-start-with-tidb).

### To start developing TiDB

931 changes: 931 additions & 0 deletions bindinfo/bind_serial_test.go

Large diffs are not rendered by default.

1,110 changes: 0 additions & 1,110 deletions bindinfo/bind_test.go

This file was deleted.

119 changes: 119 additions & 0 deletions bindinfo/handle_serial_test.go
@@ -224,6 +224,125 @@ func TestEvolveInvalidBindings(t *testing.T) {
require.True(t, status == "using" || status == "rejected")
}

var testSQLs = []struct {
createSQL string
overlaySQL string
querySQL string
originSQL string
bindSQL string
dropSQL string
memoryUsage float64
}{
{
createSQL: "binding for select * from t where i>100 using select * from t use index(index_t) where i>100",
overlaySQL: "binding for select * from t where i>99 using select * from t use index(index_t) where i>99",
querySQL: "select * from t where i > 30.0",
originSQL: "select * from `test` . `t` where `i` > ?",
bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) WHERE `i` > 99",
dropSQL: "binding for select * from t where i>100",
memoryUsage: float64(144),
},
{
createSQL: "binding for select * from t union all select * from t using select * from t use index(index_t) union all select * from t use index()",
overlaySQL: "",
querySQL: "select * from t union all select * from t",
originSQL: "select * from `test` . `t` union all select * from `test` . `t`",
bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) UNION ALL SELECT * FROM `test`.`t` USE INDEX ()",
dropSQL: "binding for select * from t union all select * from t",
memoryUsage: float64(200),
},
{
createSQL: "binding for (select * from t) union all (select * from t) using (select * from t use index(index_t)) union all (select * from t use index())",
overlaySQL: "",
querySQL: "(select * from t) union all (select * from t)",
originSQL: "( select * from `test` . `t` ) union all ( select * from `test` . `t` )",
bindSQL: "(SELECT * FROM `test`.`t` USE INDEX (`index_t`)) UNION ALL (SELECT * FROM `test`.`t` USE INDEX ())",
dropSQL: "binding for (select * from t) union all (select * from t)",
memoryUsage: float64(212),
},
{
createSQL: "binding for select * from t intersect select * from t using select * from t use index(index_t) intersect select * from t use index()",
overlaySQL: "",
querySQL: "select * from t intersect select * from t",
originSQL: "select * from `test` . `t` intersect select * from `test` . `t`",
bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) INTERSECT SELECT * FROM `test`.`t` USE INDEX ()",
dropSQL: "binding for select * from t intersect select * from t",
memoryUsage: float64(200),
},
{
createSQL: "binding for select * from t except select * from t using select * from t use index(index_t) except select * from t use index()",
overlaySQL: "",
querySQL: "select * from t except select * from t",
originSQL: "select * from `test` . `t` except select * from `test` . `t`",
bindSQL: "SELECT * FROM `test`.`t` USE INDEX (`index_t`) EXCEPT SELECT * FROM `test`.`t` USE INDEX ()",
dropSQL: "binding for select * from t except select * from t",
memoryUsage: float64(194),
},
{
createSQL: "binding for select * from t using select /*+ use_index(t,index_t)*/ * from t",
overlaySQL: "",
querySQL: "select * from t ",
originSQL: "select * from `test` . `t`",
bindSQL: "SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t`",
dropSQL: "binding for select * from t",
memoryUsage: float64(124),
},
{
createSQL: "binding for delete from t where i = 1 using delete /*+ use_index(t,index_t) */ from t where i = 1",
overlaySQL: "",
querySQL: "delete from t where i = 2",
originSQL: "delete from `test` . `t` where `i` = ?",
bindSQL: "DELETE /*+ use_index(`t` `index_t`)*/ FROM `test`.`t` WHERE `i` = 1",
dropSQL: "binding for delete from t where i = 1",
memoryUsage: float64(148),
},
{
createSQL: "binding for delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 1 using delete /*+ use_index(t,index_t), hash_join(t,t1) */ t, t1 from t inner join t1 on t.s = t1.s where t.i = 1",
overlaySQL: "",
querySQL: "delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 2",
originSQL: "delete `test` . `t` , `test` . `t1` from `test` . `t` join `test` . `t1` on `t` . `s` = `t1` . `s` where `t` . `i` = ?",
bindSQL: "DELETE /*+ use_index(`t` `index_t`) hash_join(`t`, `t1`)*/ `test`.`t`,`test`.`t1` FROM `test`.`t` JOIN `test`.`t1` ON `t`.`s` = `t1`.`s` WHERE `t`.`i` = 1",
dropSQL: "binding for delete t, t1 from t inner join t1 on t.s = t1.s where t.i = 1",
memoryUsage: float64(315),
},
{
createSQL: "binding for update t set s = 'a' where i = 1 using update /*+ use_index(t,index_t) */ t set s = 'a' where i = 1",
overlaySQL: "",
querySQL: "update t set s='b' where i=2",
originSQL: "update `test` . `t` set `s` = ? where `i` = ?",
bindSQL: "UPDATE /*+ use_index(`t` `index_t`)*/ `test`.`t` SET `s`='a' WHERE `i` = 1",
dropSQL: "binding for update t set s = 'a' where i = 1",
memoryUsage: float64(162),
},
{
createSQL: "binding for update t, t1 set t.s = 'a' where t.i = t1.i using update /*+ inl_join(t1) */ t, t1 set t.s = 'a' where t.i = t1.i",
overlaySQL: "",
querySQL: "update t , t1 set t.s='b' where t.i=t1.i",
originSQL: "update ( `test` . `t` ) join `test` . `t1` set `t` . `s` = ? where `t` . `i` = `t1` . `i`",
bindSQL: "UPDATE /*+ inl_join(`t1`)*/ (`test`.`t`) JOIN `test`.`t1` SET `t`.`s`='a' WHERE `t`.`i` = `t1`.`i`",
dropSQL: "binding for update t, t1 set t.s = 'a' where t.i = t1.i",
memoryUsage: float64(230),
},
{
createSQL: "binding for insert into t1 select * from t where t.i = 1 using insert into t1 select /*+ use_index(t,index_t) */ * from t where t.i = 1",
overlaySQL: "",
querySQL: "insert into t1 select * from t where t.i = 2",
originSQL: "insert into `test` . `t1` select * from `test` . `t` where `t` . `i` = ?",
bindSQL: "INSERT INTO `test`.`t1` SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t` WHERE `t`.`i` = 1",
dropSQL: "binding for insert into t1 select * from t where t.i = 1",
memoryUsage: float64(212),
},
{
createSQL: "binding for replace into t1 select * from t where t.i = 1 using replace into t1 select /*+ use_index(t,index_t) */ * from t where t.i = 1",
overlaySQL: "",
querySQL: "replace into t1 select * from t where t.i = 2",
originSQL: "replace into `test` . `t1` select * from `test` . `t` where `t` . `i` = ?",
bindSQL: "REPLACE INTO `test`.`t1` SELECT /*+ use_index(`t` `index_t`)*/ * FROM `test`.`t` WHERE `t`.`i` = 1",
dropSQL: "binding for replace into t1 select * from t where t.i = 1",
memoryUsage: float64(214),
},
}
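
A sketch of how a table-driven test might walk this fixture (the helper calls mirror common TiDB testkit usage and are not the verbatim body of TestGlobalBinding, which additionally checks originSQL, bindSQL, and memoryUsage against the binding cache):

func runTestSQLs(tk *testkit.TestKit) {
	tk.MustExec("use test")
	for _, testSQL := range testSQLs {
		// Install the global binding, then optionally overlay the newer one.
		tk.MustExec("create global " + testSQL.createSQL)
		if testSQL.overlaySQL != "" {
			tk.MustExec("create global " + testSQL.overlaySQL)
		}
		// The query should normalize to originSQL and pick up bindSQL.
		tk.MustExec(testSQL.querySQL)
		// Clean up so the next case starts from an empty binding cache.
		tk.MustExec("drop global " + testSQL.dropSQL)
	}
}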

func TestGlobalBinding(t *testing.T) {
store, dom, clean := testkit.CreateMockStoreAndDomain(t)
defer clean()
40 changes: 40 additions & 0 deletions bindinfo/session_handle_serial_test.go
@@ -16,6 +16,7 @@ package bindinfo_test

import (
"context"
"crypto/tls"
"strconv"
"testing"
"time"
@@ -25,6 +26,7 @@ import (
"github.com/pingcap/tidb/errno"
"github.com/pingcap/tidb/metrics"
plannercore "github.com/pingcap/tidb/planner/core"
"github.com/pingcap/tidb/session/txninfo"
"github.com/pingcap/tidb/testkit"
"github.com/pingcap/tidb/util"
"github.com/pingcap/tidb/util/stmtsummary"
@@ -364,6 +366,44 @@ func TestDefaultDB(t *testing.T) {
tk.MustQuery("show session bindings").Check(testkit.Rows())
}

type mockSessionManager struct {
PS []*util.ProcessInfo
}

func (msm *mockSessionManager) ShowTxnList() []*txninfo.TxnInfo {
panic("unimplemented!")
}

func (msm *mockSessionManager) ShowProcessList() map[uint64]*util.ProcessInfo {
ret := make(map[uint64]*util.ProcessInfo)
for _, item := range msm.PS {
ret[item.ID] = item
}
return ret
}

func (msm *mockSessionManager) GetProcessInfo(id uint64) (*util.ProcessInfo, bool) {
for _, item := range msm.PS {
if item.ID == id {
return item, true
}
}
return &util.ProcessInfo{}, false
}

func (msm *mockSessionManager) Kill(cid uint64, query bool) {
}

func (msm *mockSessionManager) KillAllConnections() {
}

func (msm *mockSessionManager) UpdateTLSConfig(cfg *tls.Config) {
}

func (msm *mockSessionManager) ServerID() uint64 {
return 1
}
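
Since the mock stands in for the server's session manager in these tests, a compile-time check that it satisfies util.SessionManager, plus a small seeding helper, could look like the sketch below (how the session is wired to use the mock is not shown in this excerpt):

// Sketch, assuming util.SessionManager is the interface this mock targets.
var _ util.SessionManager = (*mockSessionManager)(nil)

// newMockSessionManager is a hypothetical helper that seeds the mock with
// fake process entries for tests that inspect the process list.
func newMockSessionManager(infos ...*util.ProcessInfo) *mockSessionManager {
	return &mockSessionManager{PS: infos}
}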

func TestIssue19836(t *testing.T) {
store, clean := testkit.CreateMockStore(t)
defer clean()
7 changes: 4 additions & 3 deletions br/pkg/kv/kv.go
@@ -16,6 +16,7 @@ package kv

import (
"bytes"
"context"
"fmt"
"math"
"sort"
@@ -350,11 +351,11 @@ func (kvcodec *tableKVEncoder) AddRecord(
incrementalBits--
}
alloc := kvcodec.tbl.Allocators(kvcodec.se).Get(autoid.AutoRandomType)
_ = alloc.Rebase(value.GetInt64()&((1<<incrementalBits)-1), false)
_ = alloc.Rebase(context.Background(), value.GetInt64()&((1<<incrementalBits)-1), false)
}
if isAutoIncCol {
alloc := kvcodec.tbl.Allocators(kvcodec.se).Get(autoid.RowIDAllocType)
_ = alloc.Rebase(getAutoRecordID(value, &col.FieldType), false)
_ = alloc.Rebase(context.Background(), getAutoRecordID(value, &col.FieldType), false)
}
}
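
The mechanical change in this file is that every allocator Rebase call now passes a context.Context as its first argument; the call sites above imply a signature roughly like the sketch below (other autoid.Allocator methods are elided and assumed unchanged):

// Before: Rebase(newBase int64, allocIDs bool) error
// After:
type Allocator interface {
	Rebase(ctx context.Context, newBase int64, allocIDs bool) error
	// ... remaining autoid.Allocator methods elided.
}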

@@ -370,7 +371,7 @@
}
record = append(record, value)
alloc := kvcodec.tbl.Allocators(kvcodec.se).Get(autoid.RowIDAllocType)
_ = alloc.Rebase(value.GetInt64(), false)
_ = alloc.Rebase(context.Background(), value.GetInt64(), false)
}
_, err = kvcodec.tbl.AddRecord(kvcodec.se, record)
if err != nil {
3 changes: 2 additions & 1 deletion br/pkg/lightning/backend/kv/allocator.go
@@ -17,6 +17,7 @@
package kv

import (
"context"
"sync/atomic"

"github.com/pingcap/tidb/meta/autoid"
@@ -40,7 +41,7 @@ func NewPanickingAllocators(base int64) autoid.Allocators {
}

// Rebase implements the autoid.Allocator interface
func (alloc *panickingAllocator) Rebase(newBase int64, allocIDs bool) error {
func (alloc *panickingAllocator) Rebase(ctx context.Context, newBase int64, allocIDs bool) error {
// CAS
for {
oldBase := atomic.LoadInt64(alloc.base)
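The remainder of the CAS loop is collapsed in this view; a plausible completion, shown only as a sketch of the compare-and-swap pattern rather than the verbatim file contents, is:

// Sketch: a forward-only CAS rebase, assuming the file's existing imports
// ("context", "sync/atomic") and the panickingAllocator type shown above.
func (alloc *panickingAllocator) Rebase(ctx context.Context, newBase int64, allocIDs bool) error {
	for {
		oldBase := atomic.LoadInt64(alloc.base)
		if newBase <= oldBase {
			// Never move the base backwards; nothing to do.
			break
		}
		if atomic.CompareAndSwapInt64(alloc.base, oldBase, newBase) {
			break
		}
		// Lost the race with a concurrent Rebase; reload and retry.
	}
	return nil
}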
7 changes: 4 additions & 3 deletions br/pkg/lightning/backend/kv/sql2kv.go
@@ -17,6 +17,7 @@
package kv

import (
"context"
"fmt"
"math"
"math/rand"
@@ -376,13 +377,13 @@ func (kvcodec *tableKVEncoder) Encode(
if isAutoRandom && isPk {
incrementalBits := autoRandomIncrementBits(col, int(meta.AutoRandomBits))
alloc := kvcodec.tbl.Allocators(kvcodec.se).Get(autoid.AutoRandomType)
if err := alloc.Rebase(value.GetInt64()&((1<<incrementalBits)-1), false); err != nil {
if err := alloc.Rebase(context.Background(), value.GetInt64()&((1<<incrementalBits)-1), false); err != nil {
return nil, errors.Trace(err)
}
}
if isAutoIncCol {
alloc := kvcodec.tbl.Allocators(kvcodec.se).Get(autoid.AutoIncrementType)
if err := alloc.Rebase(getAutoRecordID(value, &col.FieldType), false); err != nil {
if err := alloc.Rebase(context.Background(), getAutoRecordID(value, &col.FieldType), false); err != nil {
return nil, errors.Trace(err)
}
}
@@ -403,7 +404,7 @@
}
record = append(record, value)
alloc := kvcodec.tbl.Allocators(kvcodec.se).Get(autoid.RowIDAllocType)
if err := alloc.Rebase(rowValue, false); err != nil {
if err := alloc.Rebase(context.Background(), rowValue, false); err != nil {
return nil, errors.Trace(err)
}
}
78 changes: 73 additions & 5 deletions br/pkg/lightning/config/config.go
@@ -242,6 +242,74 @@ func (t PostOpLevel) String() string {
}
}

// CheckpointKeepStrategy represents the strategy for handling checkpoint data after a task finishes successfully.
type CheckpointKeepStrategy int

const (
// CheckpointRemove removes the checkpoint data.
CheckpointRemove CheckpointKeepStrategy = iota
// CheckpointRename keeps the checkpoint data by renaming the checkpoint file/db according to the task id.
CheckpointRename
// CheckpointOrigin keeps the checkpoint data unchanged.
CheckpointOrigin
)

func (t *CheckpointKeepStrategy) UnmarshalTOML(v interface{}) error {
switch val := v.(type) {
case bool:
if val {
*t = CheckpointRename
} else {
*t = CheckpointRemove
}
case string:
return t.FromStringValue(val)
default:
return errors.Errorf("invalid checkpoint keep strategy '%v', please choose a valid option from ['remove', 'rename', 'origin']", v)
}
return nil
}

func (t CheckpointKeepStrategy) MarshalText() ([]byte, error) {
return []byte(t.String()), nil
}

// FromStringValue parses a command-line string value into a CheckpointKeepStrategy.
func (t *CheckpointKeepStrategy) FromStringValue(s string) error {
switch strings.ToLower(s) {
//nolint:goconst // This 'false' and other 'false's aren't the same.
case "remove", "false":
*t = CheckpointRemove
case "rename", "true":
*t = CheckpointRename
case "origin":
*t = CheckpointOrigin
default:
return errors.Errorf("invalid checkpoint keep strategy '%s', please choose a valid option from ['remove', 'rename', 'origin']", s)
}
return nil
}

func (t *CheckpointKeepStrategy) MarshalJSON() ([]byte, error) {
return []byte(`"` + t.String() + `"`), nil
}

func (t *CheckpointKeepStrategy) UnmarshalJSON(data []byte) error {
return t.FromStringValue(strings.Trim(string(data), `"`))
}

func (t CheckpointKeepStrategy) String() string {
switch t {
case CheckpointRemove:
return "remove"
case CheckpointRename:
return "rename"
case CheckpointOrigin:
return "origin"
default:
panic(fmt.Sprintf("invalid checkpoint keep strategy '%d'", t))
}
}
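
For reference, a small usage sketch of the new type (not part of the commit): FromStringValue accepts the three named strategies plus the legacy boolean spellings, and String reports the canonical name.

// Hypothetical usage sketch.
func exampleCheckpointKeepStrategy() {
	var strategy CheckpointKeepStrategy
	_ = strategy.FromStringValue("rename") // strategy == CheckpointRename
	fmt.Println(strategy)                  // prints "rename" via String()

	_ = strategy.FromStringValue("false") // legacy boolean spelling of "remove"
	fmt.Println(strategy)                 // prints "remove"
}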

// MaxError configures the maximum number of acceptable errors per kind.
type MaxError struct {
// Syntax is the maximum number of syntax errors accepted.
@@ -396,11 +464,11 @@ type TikvImporter struct {
}

type Checkpoint struct {
Schema string `toml:"schema" json:"schema"`
DSN string `toml:"dsn" json:"-"` // DSN may contain password, don't expose this to JSON.
Driver string `toml:"driver" json:"driver"`
Enable bool `toml:"enable" json:"enable"`
KeepAfterSuccess bool `toml:"keep-after-success" json:"keep-after-success"`
Schema string `toml:"schema" json:"schema"`
DSN string `toml:"dsn" json:"-"` // DSN may contain password, don't expose this to JSON.
Driver string `toml:"driver" json:"driver"`
Enable bool `toml:"enable" json:"enable"`
KeepAfterSuccess CheckpointKeepStrategy `toml:"keep-after-success" json:"keep-after-success"`
}

type Cron struct {
