feat: quicklist iter support start end param

xgzlucario committed Jul 12, 2024
1 parent 5f86952 commit bf3f6c3

Showing 9 changed files with 91 additions and 147 deletions.
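
For context on the new start/end behavior, here is a minimal client-side sketch that mirrors the updated command_test.go assertions shown below. It is an illustration only: the go-redis v9 import path and the server address are assumptions, and the [b a] result for LRange(1, 3) simply restates what the test asserts (the range yields end - start elements starting at index start).

package main

import (
	"context"
	"fmt"

	"github.com/redis/go-redis/v9"
)

func main() {
	ctx := context.Background()
	// Address is an assumption; point it at a running rotom instance.
	rdb := redis.NewClient(&redis.Options{Addr: "127.0.0.1:6379"})

	// Build the same list as the test: c, b, a, d, e, f (assumes "list" starts empty).
	rdb.RPush(ctx, "list", "c", "b", "a", "d", "e", "f")

	// Full range: end == -1 walks through to the tail.
	all, _ := rdb.LRange(ctx, "list", 0, -1).Result()
	fmt.Println(all) // [c b a d e f]

	// Sub-range: starts at index 1 and yields end-start elements.
	sub, _ := rdb.LRange(ctx, "list", 1, 3).Result()
	fmt.Println(sub) // [b a], per the updated test assertion
}
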
4 changes: 2 additions & 2 deletions command.go
@@ -60,8 +60,8 @@ func equalCommand(str, lowerText string) bool {
 		return false
 	}
 	const s = 'a' - 'A'
-	for i, lo := range lowerText {
-		delta := lo - rune(str[i])
+	for i, lt := range lowerText {
+		delta := lt - rune(str[i])
 		if delta != 0 && delta != s {
 			return false
 		}
9 changes: 5 additions & 4 deletions command_test.go
@@ -97,10 +97,8 @@ func TestCommand(t *testing.T) {
 	assert.Equal(resm, map[string]string{"k1": "v1", "k2": "v2", "k3": "v3", "k4": "v4", "k5": "v5"})
 
 	// hdel
-	{
-		res, _ := rdb.HDel(ctx, "map", "k1", "k2", "k3", "k99").Result()
-		assert.Equal(res, int64(3))
-	}
+	res, _ = rdb.HDel(ctx, "map", "k1", "k2", "k3", "k99").Result()
+	assert.Equal(res, int64(3))
 
 	// error
 	_, err := rdb.HSet(ctx, "map").Result()
@@ -123,6 +121,9 @@ func TestCommand(t *testing.T) {
 	res, _ := rdb.LRange(ctx, "list", 0, -1).Result()
 	assert.Equal(res, []string{"c", "b", "a", "d", "e", "f"})
 
+	res, _ = rdb.LRange(ctx, "list", 1, 3).Result()
+	assert.Equal(res, []string{"b", "a"})
+
 	// lpop
 	val, _ := rdb.LPop(ctx, "list").Result()
 	assert.Equal(val, "c")
18 changes: 16 additions & 2 deletions internal/list/bench_test.go
@@ -31,20 +31,34 @@ func BenchmarkList(b *testing.B) {
 			ls.RPop()
 		}
 	})
-	b.Run("range", func(b *testing.B) {
+	b.Run("range_all", func(b *testing.B) {
 		ls := genList(0, 1000)
 		b.ResetTimer()
 		for i := 0; i < b.N; i++ {
 			ls.Range(0, -1, func([]byte) {})
 		}
 	})
-	b.Run("revrange", func(b *testing.B) {
+	b.Run("range_100", func(b *testing.B) {
+		ls := genList(0, 1000)
+		b.ResetTimer()
+		for i := 0; i < b.N; i++ {
+			ls.Range(0, 100, func([]byte) {})
+		}
+	})
+	b.Run("revrange_all", func(b *testing.B) {
 		ls := genList(0, 1000)
 		b.ResetTimer()
 		for i := 0; i < b.N; i++ {
 			ls.RevRange(0, -1, func([]byte) {})
 		}
 	})
+	b.Run("revrange_100", func(b *testing.B) {
+		ls := genList(0, 1000)
+		b.ResetTimer()
+		for i := 0; i < b.N; i++ {
+			ls.RevRange(0, 100, func([]byte) {})
+		}
+	})
 }
 
 func BenchmarkListPack(b *testing.B) {
54 changes: 42 additions & 12 deletions internal/list/list.go
@@ -18,10 +18,6 @@ type Node struct {
 	prev, next *Node
 }
 
-func SetMaxListPackSize(s int) {
-	maxListPackSize = s
-}
-
 // New create a quicklist instance.
 func New() *QuickList {
 	n := newNode()
@@ -97,22 +93,56 @@ func (ls *QuickList) Range(start, end int, f func(data []byte)) {
 	if end == -1 {
 		end = math.MaxInt
 	}
-	for lp := ls.head; lp != nil; lp = lp.next {
-		it := lp.Iterator().SeekBegin()
-		for !it.IsEnd() {
-			f(it.Next())
+	count := end - start
+
+	lp := ls.head
+	for lp != nil && start > lp.Size() {
+		start -= lp.Size()
+		lp = lp.next
+	}
+
+	it := lp.Iterator().SeekBegin()
+	for range start {
+		it.Next()
+	}
+
+	for range count {
+		if it.IsEnd() {
+			if lp.next == nil {
+				return
+			}
+			lp = lp.next
+			it = lp.Iterator().SeekBegin()
 		}
+		f(it.Next())
 	}
 }
 
 func (ls *QuickList) RevRange(start, end int, f func(data []byte)) {
 	if end == -1 {
 		end = math.MaxInt
 	}
-	for lp := ls.tail; lp != nil; lp = lp.prev {
-		it := lp.Iterator().SeekEnd()
-		for !it.IsBegin() {
-			f(it.Prev())
+	count := end - start
+
+	lp := ls.tail
+	for lp != nil && start > lp.Size() {
+		start -= lp.Size()
+		lp = lp.prev
+	}
+
+	it := lp.Iterator().SeekEnd()
+	for range start {
+		it.Prev()
+	}
+
+	for range count {
+		if it.IsBegin() {
+			if lp.prev == nil {
+				return
+			}
+			lp = lp.prev
+			it = lp.Iterator().SeekEnd()
 		}
+		f(it.Prev())
 	}
 }
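
For readability, the Range path introduced above is restated here with explanatory comments; the comments are annotations for this write-up and are not part of the commit. RevRange mirrors the same logic from the tail via SeekEnd/Prev.

// Range calls f on count = end-start elements starting at index start;
// end == -1 means "through the end of the list".
func (ls *QuickList) Range(start, end int, f func(data []byte)) {
	if end == -1 {
		end = math.MaxInt
	}
	count := end - start

	// Skip whole listpack nodes that lie entirely before start,
	// reducing start by each skipped node's element count.
	lp := ls.head
	for lp != nil && start > lp.Size() {
		start -= lp.Size()
		lp = lp.next
	}

	// Step the iterator over the remaining in-node offset.
	it := lp.Iterator().SeekBegin()
	for range start {
		it.Next()
	}

	// Emit count elements, hopping to the next node whenever the
	// current listpack is exhausted; stop early at the tail.
	for range count {
		if it.IsEnd() {
			if lp.next == nil {
				return
			}
			lp = lp.next
			it = lp.Iterator().SeekBegin()
		}
		f(it.Next())
	}
}
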
21 changes: 19 additions & 2 deletions internal/list/list_test.go
@@ -23,9 +23,8 @@ func list2slice(ls *QuickList) (res []string) {
 }
 
 func TestList(t *testing.T) {
-	const N = 1000
+	const N = 10000
 	assert := assert.New(t)
-	SetMaxListPackSize(128)
 
 	t.Run("lpush", func(t *testing.T) {
 		ls := New()
@@ -87,6 +86,15 @@ func TestList(t *testing.T) {
 			i++
 		})
 		assert.Equal(i, N)
+
+		for _, start := range []int{100, 1000, 5000} {
+			i = 0
+			ls.Range(start, start+100, func(data []byte) {
+				assert.Equal(string(data), genKey(start+i))
+				i++
+			})
+			assert.Equal(i, 100)
+		}
 	})
 
 	t.Run("revrange", func(t *testing.T) {
@@ -97,5 +105,14 @@
 			i++
 		})
 		assert.Equal(i, N)
+
+		for _, start := range []int{100, 1000, 5000} {
+			i = 0
+			ls.RevRange(start, start+100, func(data []byte) {
+				assert.Equal(string(data), genKey(N-start-i-1))
+				i++
+			})
+			assert.Equal(i, 100)
+		}
 	})
 }
7 changes: 4 additions & 3 deletions internal/list/listpack.go
@@ -8,11 +8,12 @@ import (
 	"github.com/xgzlucario/rotom/internal/pkg"
 )
 
-var (
+const (
 	maxListPackSize = 8 * 1024
+)
 
-	bpool = pkg.NewBufferPool()
-
+var (
+	bpool = pkg.NewBufferPool()
 	encoder, _ = zstd.NewWriter(nil)
 	decoder, _ = zstd.NewReader(nil)
 )
38 changes: 0 additions & 38 deletions internal/sstring/sstring.go

This file was deleted.

77 changes: 0 additions & 77 deletions internal/sstring/sstring_test.go

This file was deleted.

10 changes: 3 additions & 7 deletions rotom.go
@@ -155,7 +155,7 @@ func ProcessQueryBuf(client *Client) {
 	} else {
 		err := ErrUnknownCommand(command)
 		client.replyWriter.WriteError(err)
-		log.Warn().Msgf("ERR %v", err)
+		log.Error().Msg(err.Error())
 	}
 }
 
@@ -197,12 +197,8 @@ func initServer(config *Config) (err error) {
 }
 
 func SyncAOF(loop *AeLoop, id int, extra interface{}) {
-	if db.aof == nil {
-		return
-	}
-	err := db.aof.Flush()
-	if err != nil {
-		log.Error().Msgf("flush aof buffer error: %v", err)
+	if err := db.aof.Flush(); err != nil {
+		log.Error().Msgf("sync aof error: %v", err)
 	}
 }
 
