expression: impl cast as array function #40076

Merged 6 commits on Dec 26, 2022
15 changes: 15 additions & 0 deletions errors.toml
@@ -1696,6 +1696,21 @@ error = '''
Invalid data type for JSON data in argument %d to function %s; a JSON string or JSON type is required.
'''

["expression:3752"]
error = '''
Value is out of range for expression index '%s' at row %d
'''

["expression:3903"]
error = '''
Invalid JSON value for CAST for expression index '%s'
'''

["expression:3907"]
error = '''
Data too long for expression index '%s'
'''

["expression:8128"]
error = '''
Invalid TABLESAMPLE: %s
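These three entries correspond to the exported error definitions added in expression/errors.go further down in this diff: an out-of-range value, an invalid JSON value for CAST, and data too long, all scoped to an expression (functional) index. Below is a minimal, hypothetical sketch of how the 3903 entry would render once given an index name; the standalone main wrapper and the index name "idx_vals" are illustrative only, not part of the change.

```go
package main

import (
	"fmt"

	"github.com/pingcap/tidb/expression"
)

func main() {
	// ErrInvalidJSONForFuncIndex takes the index name as its single argument,
	// matching the %s placeholder in the ["expression:3903"] entry above.
	err := expression.ErrInvalidJSONForFuncIndex.GenWithStackByArgs("idx_vals")
	fmt.Println(err)
	// Expected output, roughly:
	//   [expression:3903]Invalid JSON value for CAST for expression index 'idx_vals'
}
```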
34 changes: 30 additions & 4 deletions executor/insert_common.go
@@ -388,7 +388,7 @@ func (e *InsertValues) evalRow(ctx context.Context, list []expression.Expression
e.evalBuffer.SetDatum(offset, val1)
}
// Row may lack generated columns, autoIncrement columns, or empty columns here.
return e.fillRow(ctx, row, hasValue)
return e.fillRow(ctx, row, hasValue, rowIdx)
}

var emptyRow chunk.Row
@@ -422,7 +422,7 @@ func (e *InsertValues) fastEvalRow(ctx context.Context, list []expression.Expres
offset := e.insertColumns[i].Offset
row[offset], hasValue[offset] = val1, true
}
return e.fillRow(ctx, row, hasValue)
return e.fillRow(ctx, row, hasValue, rowIdx)
}

// setValueForRefColumn set some default values for the row to eval the row value with other columns,
@@ -562,7 +562,7 @@ func (e *InsertValues) getRow(ctx context.Context, vals []types.Datum) ([]types.
hasValue[offset] = true
}

return e.fillRow(ctx, row, hasValue)
return e.fillRow(ctx, row, hasValue, 0)
}

// getColDefaultValue gets the column default value.
@@ -647,7 +647,7 @@ func (e *InsertValues) fillColValue(ctx context.Context, datum types.Datum, idx
// `insert|replace values` can guarantee consecutive autoID in a batch.
// Other statements like `insert select from` don't guarantee consecutive autoID.
// https://dev.mysql.com/doc/refman/8.0/en/innodb-auto-increment-handling.html
func (e *InsertValues) fillRow(ctx context.Context, row []types.Datum, hasValue []bool) ([]types.Datum, error) {
func (e *InsertValues) fillRow(ctx context.Context, row []types.Datum, hasValue []bool, rowIdx int) ([]types.Datum, error) {
gCols := make([]*table.Column, 0)
tCols := e.Table.Cols()
if e.hasExtraHandle {
@@ -693,6 +693,9 @@ func (e *InsertValues) fillRow(ctx context.Context, row []types.Datum, hasValue
for i, gCol := range gCols {
colIdx := gCol.ColumnInfo.Offset
val, err := e.GenExprs[i].Eval(chunk.MutRowFromDatums(row).ToRow())
if err != nil && gCol.FieldType.IsArray() {
return nil, completeError(tbl, gCol.Offset, rowIdx, err)
}
if e.ctx.GetSessionVars().StmtCtx.HandleTruncate(err) != nil {
return nil, err
}
@@ -708,6 +711,29 @@
return row, nil
}

func completeError(tbl *model.TableInfo, offset int, rowIdx int, err error) error {
name := "expression_index"
for _, idx := range tbl.Indices {
for _, column := range idx.Columns {
if column.Offset == offset {
name = idx.Name.O
break
}
}
}

if expression.ErrInvalidJSONForFuncIndex.Equal(err) {
return expression.ErrInvalidJSONForFuncIndex.GenWithStackByArgs(name)
}
if types.ErrOverflow.Equal(err) {
return expression.ErrDataOutOfRangeFuncIndex.GenWithStackByArgs(name, rowIdx+1)
}
if types.ErrDataTooLong.Equal(err) {
return expression.ErrFuncIndexDataIsTooLong.GenWithStackByArgs(name)
}
return err
}

// isAutoNull can help judge whether a datum is AutoIncrement Null quickly.
// This used to help lazyFillAutoIncrement to find consecutive N datum backwards for batch autoID alloc.
func (e *InsertValues) isAutoNull(ctx context.Context, d types.Datum, col *table.Column) bool {
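The new completeError helper rewrites the generic evaluation error into its functional-index form by scanning the table's indexes for one that covers the failing generated column; rowIdx is 0-based, so the reported row number is rowIdx+1. The test-style sketch below illustrates that mapping; the table layout, the index name "idx_gen", and the offsets are invented for illustration and assume the usual executor test imports.

```go
func TestCompleteErrorSketch(t *testing.T) {
	// Hypothetical table: one expression index "idx_gen" over the hidden
	// generated column at offset 1.
	tblInfo := &model.TableInfo{
		Indices: []*model.IndexInfo{{
			Name:    model.NewCIStr("idx_gen"),
			Columns: []*model.IndexColumn{{Offset: 1}},
		}},
	}
	raw := types.ErrOverflow.GenWithStackByArgs("BIGINT", "CAST")

	// Column offset 1, 0-based row index 2 -> reported as row 3.
	wrapped := completeError(tblInfo, 1, 2, raw)

	// The overflow is mapped onto the functional-index error; its message
	// should read roughly:
	//   Value is out of range for expression index 'idx_gen' at row 3
	require.True(t, expression.ErrDataOutOfRangeFuncIndex.Equal(wrapped))
}
```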
78 changes: 75 additions & 3 deletions expression/builtin_cast.go
@@ -36,6 +36,7 @@ import (
"github.com/pingcap/tidb/parser/mysql"
"github.com/pingcap/tidb/parser/terror"
"github.com/pingcap/tidb/sessionctx"
"github.com/pingcap/tidb/sessionctx/stmtctx"
"github.com/pingcap/tidb/sessionctx/variable"
"github.com/pingcap/tidb/types"
"github.com/pingcap/tidb/util/chunk"
@@ -420,7 +421,7 @@ func (c *castAsArrayFunctionClass) verifyArgs(args []Expression) error {
}

if args[0].GetType().EvalType() != types.ETJson {
return types.ErrInvalidJSONData.GenWithStackByArgs("1", "cast_as_array")
return ErrInvalidTypeForJSON.GenWithStackByArgs(1, "cast_as_array")
}

return nil
@@ -467,9 +468,80 @@ func (b *castJSONAsArrayFunctionSig) evalJSON(row chunk.Row) (res types.BinaryJS
return types.BinaryJSON{}, false, ErrNotSupportedYet.GenWithStackByArgs("CAST-ing Non-JSON Array type to array")
}

// TODO: impl the cast(... as ... array) function
arrayVals := make([]any, 0, len(b.args))
ft := b.tp.ArrayType()
f := convertJSON2Tp(ft.EvalType())
if f == nil {
return types.BinaryJSON{}, false, ErrNotSupportedYet.GenWithStackByArgs("CAS-ing JSON to the target type")
}
sc := b.ctx.GetSessionVars().StmtCtx
originalOverflowAsWarning := sc.OverflowAsWarning
originIgnoreTruncate := sc.IgnoreTruncate
originTruncateAsWarning := sc.TruncateAsWarning
sc.OverflowAsWarning = false
sc.IgnoreTruncate = false
sc.TruncateAsWarning = false
defer func() {
sc.OverflowAsWarning = originalOverflowAsWarning
sc.IgnoreTruncate = originIgnoreTruncate
sc.TruncateAsWarning = originTruncateAsWarning
}()
for i := 0; i < val.GetElemCount(); i++ {
item, err := f(sc, val.ArrayGetElem(i), ft)
if err != nil {
return types.BinaryJSON{}, false, err
}
arrayVals = append(arrayVals, item)
}
return types.CreateBinaryJSON(arrayVals), false, nil
}

return types.BinaryJSON{}, false, nil
func convertJSON2Tp(eval types.EvalType) func(*stmtctx.StatementContext, types.BinaryJSON, *types.FieldType) (any, error) {
switch eval {
case types.ETString:
return func(sc *stmtctx.StatementContext, item types.BinaryJSON, tp *types.FieldType) (any, error) {
if item.TypeCode != types.JSONTypeCodeString {
return nil, ErrInvalidJSONForFuncIndex
}
return types.ProduceStrWithSpecifiedTp(string(item.GetString()), tp, sc, false)
}
case types.ETInt:
return func(sc *stmtctx.StatementContext, item types.BinaryJSON, tp *types.FieldType) (any, error) {
if item.TypeCode != types.JSONTypeCodeInt64 && item.TypeCode != types.JSONTypeCodeUint64 {
return nil, ErrInvalidJSONForFuncIndex
}
return types.ConvertJSONToInt(sc, item, mysql.HasUnsignedFlag(tp.GetFlag()), tp.GetType())
}
case types.ETReal, types.ETDecimal:
return func(sc *stmtctx.StatementContext, item types.BinaryJSON, tp *types.FieldType) (any, error) {
if item.TypeCode != types.JSONTypeCodeInt64 && item.TypeCode != types.JSONTypeCodeUint64 && item.TypeCode != types.JSONTypeCodeFloat64 {
return nil, ErrInvalidJSONForFuncIndex
}
return types.ConvertJSONToFloat(sc, item)
}
case types.ETDatetime:
return func(sc *stmtctx.StatementContext, item types.BinaryJSON, tp *types.FieldType) (any, error) {
if (tp.GetType() == mysql.TypeDatetime && item.TypeCode != types.JSONTypeCodeDatetime) || (tp.GetType() == mysql.TypeDate && item.TypeCode != types.JSONTypeCodeDate) {
return nil, ErrInvalidJSONForFuncIndex
}
res := item.GetTime()
res.SetType(tp.GetType())
if tp.GetType() == mysql.TypeDate {
// Truncate hh:mm:ss part if the type is Date.
res.SetCoreTime(types.FromDate(res.Year(), res.Month(), res.Day(), 0, 0, 0, 0))
}
return res, nil
}
case types.ETDuration:
return func(sc *stmtctx.StatementContext, item types.BinaryJSON, tp *types.FieldType) (any, error) {
if item.TypeCode != types.JSONTypeCodeDuration {
return nil, ErrInvalidJSONForFuncIndex
}
return item.GetDuration(), nil
}
default:
return nil
}
}

type castAsJSONFunctionClass struct {
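evalJSON now temporarily switches the StatementContext to strict overflow and truncation handling, then runs every element of the JSON array through a per-element converter chosen by convertJSON2Tp. The converters are deliberately strict about JSON type codes, so a JSON string never silently becomes an integer. Below is a hedged sketch of that per-element path for a hypothetical CAST(... AS SIGNED ARRAY) element type; since convertJSON2Tp is unexported, such a snippet would have to live inside package expression.

```go
// Assumed element type: BIGINT, as produced by CAST(... AS SIGNED ARRAY).
sc := &stmtctx.StatementContext{}
elemTp := types.NewFieldType(mysql.TypeLonglong)

convert := convertJSON2Tp(elemTp.EvalType()) // types.ETInt for a BIGINT element
v, err := convert(sc, types.CreateBinaryJSON(int64(3)), elemTp)
// v holds int64(3), err is nil.

// A JSON string element is rejected rather than coerced:
_, err = convert(sc, types.CreateBinaryJSON("3"), elemTp)
// err is ErrInvalidJSONForFuncIndex, which the insert path earlier in this
// diff rewrites into "Invalid JSON value for CAST for expression index ...".
_, _ = v, err
```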
70 changes: 70 additions & 0 deletions expression/builtin_cast_test.go
@@ -1619,3 +1619,73 @@ func TestCastBinaryStringAsJSONSig(t *testing.T) {
require.Equal(t, tt.resultStr, res.String())
}
}

func TestCastArrayFunc(t *testing.T) {
ctx := createContext(t)
tbl := []struct {
input interface{}
expected interface{}
tp *types.FieldType
success bool
buildFuncSuccess bool
}{
{
[]interface{}{int64(-1), int64(2), int64(3)},
[]interface{}{int64(-1), int64(2), int64(3)},
types.NewFieldTypeBuilder().SetType(mysql.TypeLonglong).SetCharset(charset.CharsetBin).SetCollate(charset.CollationBin).SetArray(true).BuildP(),
true,
true,
},
{
[]interface{}{int64(-1), int64(2), int64(3)},
nil,
types.NewFieldTypeBuilder().SetType(mysql.TypeString).SetCharset(charset.CharsetUTF8MB4).SetCollate(charset.CollationUTF8MB4).SetArray(true).BuildP(),
false,
true,
},
{
[]interface{}{"1"},
nil,
types.NewFieldTypeBuilder().SetType(mysql.TypeLonglong).SetCharset(charset.CharsetBin).SetCollate(charset.CharsetBin).SetArray(true).BuildP(),
false,
true,
},
{
[]interface{}{"1", "2"},
nil,
types.NewFieldTypeBuilder().SetType(mysql.TypeDouble).SetCharset(charset.CharsetBin).SetCollate(charset.CharsetBin).SetArray(true).BuildP(),
false,
true,
},
{
[]interface{}{int64(-1), 2.1, int64(3)},
[]interface{}{int64(-1), 2.1, int64(3)},
types.NewFieldTypeBuilder().SetType(mysql.TypeDouble).SetCharset(charset.CharsetBin).SetCollate(charset.CharsetBin).SetArray(true).BuildP(),
true,
true,
},
}
for _, tt := range tbl {
f, err := BuildCastFunctionWithCheck(ctx, datumsToConstants(types.MakeDatums(types.CreateBinaryJSON(tt.input)))[0], tt.tp)
if tt.buildFuncSuccess {
require.NoError(t, err, tt.input)
} else {
require.Error(t, err, tt.input)
continue
}

val, isNull, err := f.EvalJSON(ctx, chunk.Row{})
if tt.success {
require.NoError(t, err, tt.input)
if tt.expected == nil {
require.True(t, isNull, tt.input)
} else {
j1 := types.CreateBinaryJSON(tt.expected)
cmp := types.CompareBinaryJSON(j1, val)
require.Equal(t, 0, cmp, tt.input)
}
} else {
require.Error(t, err, tt.input)
}
}
}
3 changes: 3 additions & 0 deletions expression/errors.go
@@ -38,6 +38,9 @@ var (
ErrInternal = dbterror.ClassOptimizer.NewStd(mysql.ErrInternal)
ErrNoDB = dbterror.ClassOptimizer.NewStd(mysql.ErrNoDB)
ErrNotSupportedYet = dbterror.ClassExpression.NewStd(mysql.ErrNotSupportedYet)
ErrInvalidJSONForFuncIndex = dbterror.ClassExpression.NewStd(mysql.ErrInvalidJSONValueForFuncIndex)
ErrDataOutOfRangeFuncIndex = dbterror.ClassExpression.NewStd(mysql.ErrDataOutOfRangeFunctionalIndex)
ErrFuncIndexDataIsTooLong = dbterror.ClassExpression.NewStd(mysql.ErrFunctionalIndexDataIsTooLong)

// All the un-exported errors are defined here:
errFunctionNotExists = dbterror.ClassExpression.NewStd(mysql.ErrSpDoesNotExist)