Update part of codebase to use new for := range syntax #16738

Merged 2 commits on Sep 11, 2024
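This PR converts classic three-clause counting loops to the range-over-integer form introduced in Go 1.22. For context, a minimal standalone sketch of the two equivalent shapes (illustrative only, not part of this diff); the index can be kept when the body needs it and dropped when it does not:

package main

import "fmt"

func main() {
	// Old form: explicit counter loop.
	for i := 0; i < 3; i++ {
		fmt.Println(i)
	}

	// New form: i ranges over 0, 1, 2.
	for i := range 3 {
		fmt.Println(i)
	}

	// When the index is unused, it can be omitted entirely.
	for range 3 {
		fmt.Println("tick")
	}
}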
2 changes: 1 addition & 1 deletion go/mysql/auth_server.go
@@ -601,7 +601,7 @@ func newSalt() ([]byte, error) {
}

// Salt must be a legal UTF8 string.
- for i := 0; i < len(salt); i++ {
+ for i := range len(salt) {
salt[i] &= 0x7f
if salt[i] == '\x00' || salt[i] == '$' {
salt[i]++
8 changes: 4 additions & 4 deletions go/mysql/binlog/binlog_json.go
@@ -224,7 +224,7 @@ var binaryIntSizes = map[jsonDataType]int{
func binparserInt(typ jsonDataType, data []byte, pos int) (*json.Value, error) {
var val uint64
size := binaryIntSizes[typ]
- for i := 0; i < size; i++ {
+ for i := range size {
val = val + uint64(data[pos+i])<<(8*i)
}
var s string
@@ -344,7 +344,7 @@ func binparserArray(typ jsonDataType, data []byte, pos int) (node *json.Value, e
large := typ == jsonLargeArray
elementCount, pos = readInt(data, pos, large)
_, pos = readInt(data, pos, large)
- for i := 0; i < elementCount; i++ {
+ for range elementCount {
elem, pos, err = binparserElement(data, pos, large)
if err != nil {
return nil, err
@@ -366,7 +366,7 @@ func binparserObject(typ jsonDataType, data []byte, pos int) (node *json.Value,
_, pos = readInt(data, pos, large)

keys := make([]string, elementCount) // stores all the keys in this object
- for i := 0; i < elementCount; i++ {
+ for i := range elementCount {
var keyOffset int
var keyLength int
keyOffset, pos = readInt(data, pos, large)
@@ -384,7 +384,7 @@ func binparserObject(typ jsonDataType, data []byte, pos int) (node *json.Value,
var elem *json.Value

// get the value for each key
- for i := 0; i < elementCount; i++ {
+ for i := range elementCount {
elem, pos, err = binparserElement(data, pos, large)
if err != nil {
return nil, err
6 changes: 3 additions & 3 deletions go/mysql/binlog/rbr.go
@@ -542,7 +542,7 @@ func CellValue(data []byte, pos int, typ byte, metadata uint16, field *querypb.F
}

// now the full digits, 32 bits each, 9 digits
- for i := 0; i < intg0; i++ {
+ for range intg0 {
val = binary.BigEndian.Uint32(d[pos : pos+4])
fmt.Fprintf(txt, "%09d", val)
pos += 4
@@ -564,7 +564,7 @@ func CellValue(data []byte, pos int, typ byte, metadata uint16, field *querypb.F
txt.WriteByte('.')

// now the full fractional digits
- for i := 0; i < frac0; i++ {
+ for range frac0 {
val = binary.BigEndian.Uint32(d[pos : pos+4])
fmt.Fprintf(txt, "%09d", val)
pos += 4
@@ -718,7 +718,7 @@ func CellValue(data []byte, pos int, typ byte, metadata uint16, field *querypb.F
// numbers.
l := int(metadata & 0xff)
var val uint64
- for i := 0; i < l; i++ {
+ for i := range l {
val += uint64(data[pos+i]) << (uint(i) * 8)
}
return sqltypes.MakeTrusted(querypb.Type_UINT64,
2 changes: 1 addition & 1 deletion go/mysql/binlog_event.go
@@ -328,7 +328,7 @@ func (b *Bitmap) Set(index int, value bool) {
// hence the non-efficient logic.
func (b *Bitmap) BitCount() int {
sum := 0
- for i := 0; i < b.count; i++ {
+ for i := range b.count {
if b.Bit(i) {
sum++
}
10 changes: 5 additions & 5 deletions go/mysql/binlog_event_make_test.go
@@ -254,7 +254,7 @@ func TestLargeTableMapEvent(t *testing.T) {
types := make([]byte, 0, colLen)
metadata := make([]uint16, 0, colLen)

- for i := 0; i < colLen; i++ {
+ for range colLen {
types = append(types, binlog.TypeLongLong)
metadata = append(metadata, 0)
}
@@ -429,7 +429,7 @@ func TestLargeRowsEvent(t *testing.T) {
types := make([]byte, 0, colLen)
metadata := make([]uint16, 0, colLen)

- for i := 0; i < colLen; i++ {
+ for range colLen {
types = append(types, binlog.TypeLong)
metadata = append(metadata, 0)
}
@@ -446,7 +446,7 @@

identify := make([]byte, 0, colLen*4)
data := make([]byte, 0, colLen*4)
- for i := 0; i < colLen; i++ {
+ for range colLen {
identify = append(identify, 0x10, 0x20, 0x30, 0x40)
data = append(data, 0x10, 0x20, 0x30, 0x40)
}
@@ -467,7 +467,7 @@
}

// All rows are included, none are NULL.
- for i := 0; i < colLen; i++ {
+ for i := range colLen {
rows.IdentifyColumns.Set(i, true)
rows.DataColumns.Set(i, true)
}
@@ -476,7 +476,7 @@
// 1076895760 is 0x40302010.
identifies, _ := rows.StringIdentifiesForTests(tm, 0)
expected := make([]string, 0, colLen)
- for i := 0; i < colLen; i++ {
+ for range colLen {
expected = append(expected, "1076895760")
}
if !reflect.DeepEqual(identifies, expected) {
2 changes: 1 addition & 1 deletion go/mysql/collations/charset/helpers.go
@@ -22,7 +22,7 @@ func Slice(charset Charset, input []byte, from, to int) []byte {
}
iter := input
start := 0
- for i := 0; i < to; i++ {
+ for i := range to {
r, size := charset.DecodeRune(iter)
if r == RuneError && size < 2 {
break
4 changes: 2 additions & 2 deletions go/mysql/collations/colldata/8bit.go
@@ -158,7 +158,7 @@ func (c *Collation_8bit_simple_ci) Collate(left, right []byte, rightIsPrefix boo
sortOrder := c.sort
cmpLen := min(len(left), len(right))

- for i := 0; i < cmpLen; i++ {
+ for i := range cmpLen {
sortL, sortR := sortOrder[left[i]], sortOrder[right[i]]
if sortL != sortR {
return int(sortL) - int(sortR)
@@ -174,7 +174,7 @@ func (c *Collation_8bit_simple_ci) TinyWeightString(src []byte) uint32 {
var w32 [4]byte
sortOrder := c.sort
sortLen := min(4, len(src))
- for i := 0; i < sortLen; i++ {
+ for i := range sortLen {
w32[i] = sortOrder[src[i]]
}
return binary.BigEndian.Uint32(w32[:4])
6 changes: 3 additions & 3 deletions go/mysql/collations/colldata/uca_contraction_test.go
@@ -129,7 +129,7 @@ func benchmarkFind(b *testing.B, input []byte, contract uca.Contractor) {
b.ReportAllocs()
b.ResetTimer()

- for n := 0; n < b.N; n++ {
+ for range b.N {
in := input
for len(in) > 0 {
cp, width := utf8.DecodeRune(in)
@@ -144,7 +144,7 @@ func benchmarkFindJA(b *testing.B, input []byte, contract uca.Contractor) {
b.ReportAllocs()
b.ResetTimer()

- for n := 0; n < b.N; n++ {
+ for range b.N {
prev := rune(0)
in := input
for len(in) > 0 {
@@ -166,7 +166,7 @@ func newStrgen() *strgen {
}

func (s *strgen) withASCII() *strgen {
- for r := rune(0); r < utf8.RuneSelf; r++ {
+ for r := range rune(utf8.RuneSelf) {
s.repertoire[r] = struct{}{}
}
return s
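Note on the strgen change above: range over an integer accepts any integer type, and rune is an alias for int32, so "for r := range rune(utf8.RuneSelf)" walks r over the 128 ASCII code points just like the old counter loop. A small standalone sketch (illustrative only, not vitess code):

package main

import (
	"fmt"
	"unicode/utf8"
)

func main() {
	count := 0
	for r := range rune(utf8.RuneSelf) { // r is typed rune, values 0 through 127
		_ = r
		count++
	}
	fmt.Println(count) // prints 128
}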
6 changes: 3 additions & 3 deletions go/mysql/collations/colldata/uca_test.go
@@ -180,7 +180,7 @@ func TestIsPrefix(t *testing.T) {
coll := testcollation(t, collName)
input := []rune(strings.ToUpper(ExampleStringLong))

- for size := 0; size < len(input); size++ {
+ for size := range len(input) {
left := ExampleStringLong
right := string(input[:size])

@@ -777,7 +777,7 @@ func BenchmarkAllUCAWeightStrings(b *testing.B) {

b.ResetTimer()

- for i := 0; i < b.N; i++ {
+ for range b.N {
_ = collation.WeightString(buf, input, 0)
}
})
@@ -1058,7 +1058,7 @@ func BenchmarkUCA900Collation(b *testing.B) {
str2 := []byte(strings.ToUpper(content))
for _, collation := range Collations {
b.Run(fmt.Sprintf("%s/%d/%s", teststr.Name, length, collation.Name()), func(b *testing.B) {
- for i := 0; i < b.N; i++ {
+ for range b.N {
_ = collation.Collate(str1, str2, false)
}
})
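Note on the benchmark loops above: "for range b.N" runs exactly b.N iterations, so timing behavior is unchanged; the unused index variable is simply dropped. A minimal sketch of the same shape (hypothetical benchmark body, not vitess code):

package bench_test

import "testing"

func BenchmarkShape(b *testing.B) {
	for range b.N { // same iteration count as: for i := 0; i < b.N; i++
		_ = make([]byte, 0, 8)
	}
}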
2 changes: 1 addition & 1 deletion go/mysql/collations/internal/uca/iter_fast_900.go
@@ -301,7 +301,7 @@ func (it *FastIterator900) NextWeightBlock64(dstbytes []byte) int {

// Slow path: just loop up to 8 times to fill the buffer and bail
// early if we exhaust the iterator.
- for i := 0; i < 8; i++ {
+ for i := range 8 {
w, ok := it.Next()
if !ok {
return i * 2
8 changes: 4 additions & 4 deletions go/mysql/collations/internal/uca/layout.go
@@ -71,7 +71,7 @@ func equalWeights900(table Weights, levels int, A, B rune) bool {
cA := int((*pageA)[offsetA])
cB := int((*pageB)[offsetB])

- for l := 0; l < levels; l++ {
+ for l := range levels {
wA, wB := l*256, l*256
wA1, wB1 := wA+(cA*256*3), wB+(cB*256*3)

@@ -118,7 +118,7 @@ func (Layout_uca900) DebugWeights(table Weights, codepoint rune) (result []uint1
}

ceCount := int((*page)[offset])
- for ce := 0; ce < ceCount; ce++ {
+ for ce := range ceCount {
result = append(result,
(*page)[256+(ce*3+0)*256+offset],
(*page)[256+(ce*3+1)*256+offset],
@@ -264,8 +264,8 @@ func (Layout_uca_legacy) allocPage(original *[]uint16, patches []Patch) []uint16
}

newPage := make([]uint16, minLenForPage)
- for i := 0; i < CodepointsPerPage; i++ {
- for j := 0; j < originalStride; j++ {
+ for i := range CodepointsPerPage {
+ for range originalStride {
newPage[1+i*newStride] = (*original)[1+i*originalStride]
}
}
6 changes: 3 additions & 3 deletions go/mysql/collations/tools/makecolldata/codegen/tablegen.go
@@ -151,7 +151,7 @@ func (p *page) weights900Fast(level int) (w []uint32) {
if p.entryCount == 0 {
return nil
}
- for i := 0; i < 128; i++ {
+ for i := range 128 {
entry := &p.entries[i]
if len(entry.weights) > 3 {
panic("trying to dump fast weights for codepoint with >3 weights")
@@ -165,7 +165,7 @@
}
w = append(w, weight)
}
- for i := 0; i < 128; i++ {
+ for range 128 {
w = append(w, 0x0)
}
return
@@ -179,7 +179,7 @@ func (p *page) weights900() (w []uint16) {
for _, entry := range p.entries {
w = append(w, uint16(len(entry.weights)/3))
}
- for level := 0; level < maxCollations; level++ {
+ for level := range maxCollations {
for _, entry := range p.entries {
var weight uint16
if level < len(entry.weights) {
2 changes: 1 addition & 1 deletion go/mysql/collations/vindex/unicode/norm/composition.go
@@ -136,7 +136,7 @@ func (rb *reorderBuffer) doFlush() bool {

// appendFlush appends the normalized segment to rb.out.
func appendFlush(rb *reorderBuffer) bool {
- for i := 0; i < rb.nrune; i++ {
+ for i := range rb.nrune {
start := rb.rune[i].pos
end := start + rb.rune[i].size
rb.out = append(rb.out, rb.byte[start:end]...)
2 changes: 1 addition & 1 deletion go/mysql/conn.go
@@ -1249,7 +1249,7 @@ func (c *Conn) handleComPrepare(handler Handler, data []byte) (kontinue bool) {
}

bindVars := make(map[string]*querypb.BindVariable, paramsCount)
- for i := uint16(0); i < paramsCount; i++ {
+ for i := range uint16(paramsCount) {
parameterID := fmt.Sprintf("v%d", i+1)
bindVars[parameterID] = &querypb.BindVariable{}
}
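Note on the handleComPrepare change above: the range expression may be any integer type, and the loop variable takes that type, so ranging over a uint16 count yields a uint16 index. A small standalone sketch (hypothetical values, not vitess code):

package main

import "fmt"

func main() {
	paramsCount := uint16(3)
	bindVars := make(map[string]struct{}, paramsCount)
	for i := range paramsCount { // i is typed uint16: 0, 1, 2
		bindVars[fmt.Sprintf("v%d", i+1)] = struct{}{}
	}
	fmt.Println(len(bindVars)) // prints 3
}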
4 changes: 2 additions & 2 deletions go/mysql/conn_test.go
@@ -744,7 +744,7 @@ func TestEOFOrLengthEncodedIntFuzz(t *testing.T) {
cConn.Close()
}()

- for i := 0; i < 100; i++ {
+ for range 100 {
bytes := make([]byte, rand.IntN(16)+1)
_, err := crypto_rand.Read(bytes)
require.NoError(t, err, "error doing rand.Read")
@@ -1001,7 +1001,7 @@ func TestPrepareAndExecute(t *testing.T) {
// and check that the handler received the correct input
ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
defer cancel()
- for i := 0; i < 100; i++ {
+ for i := range 100 {
startGoRoutine(ctx, t, fmt.Sprintf("%d:%s", i, randSeq(i)))
}

8 changes: 4 additions & 4 deletions go/mysql/datetime/helpers.go
@@ -76,7 +76,7 @@ func appendInt(b []byte, x int, width int) []byte {
// match reports whether s1 and s2 match ignoring case.
// It is assumed s1 and s2 are the same length.
func match(s1, s2 string) bool {
- for i := 0; i < len(s1); i++ {
+ for i := range len(s1) {
c1 := s1[i]
c2 := s2[i]
if c1 != c2 {
@@ -153,7 +153,7 @@ func isDigit[bytes []byte | string](s bytes, i int) bool {
func isNumber[bytes []byte | string](s bytes) (int, bool) {
var dot bool
pos := -1
- for i := 0; i < len(s); i++ {
+ for i := range len(s) {
if !isDigit(s, i) {
if dot {
return i, true
@@ -189,7 +189,7 @@ func getnum(s string, fixed bool) (int, string, bool) {

func getnuml(s string, l int) (int, string, bool) {
var res int
- for i := 0; i < l; i++ {
+ for i := range l {
if !isDigit(s, i) {
return 0, s, false
}
@@ -273,7 +273,7 @@ func parseNanoseconds[bytes []byte | string](value bytes, nbytes int) (ns int, l
// We need nanoseconds, which means scaling by the number
// of missing digits in the format, maximum length 10.
scaleDigits := 10 - nbytes
- for i := 0; i < scaleDigits; i++ {
+ for range scaleDigits {
ns *= 10
}
