-
Notifications
You must be signed in to change notification settings - Fork 3
/
world_tokenizer.go
246 lines (222 loc) · 5.64 KB
/
world_tokenizer.go
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
// Copyright (c) seasonjs. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
package rwkv
import (
"bufio"
"embed"
"errors"
"fmt"
"log"
"strconv"
"strings"
"unicode/utf8"
)
//go:embed rwkv_vocab_v20230424.txt
var worldTokenizerFS embed.FS
// TrieNode represents a node in the trie
type TrieNode struct {
	// to maps a single-rune string to the child node reached by that rune.
	to map[string]*TrieNode
	// values holds the complete tokens that terminate at this node;
	// it is used as a set (stored bools are always true).
	values map[string]bool
}
// Trie represents the trie data structure
type Trie struct {
	Root *TrieNode // Root is the entry node used by Add and FindLongest.
}
// NewTrieNode initializes a new trie node with empty child and value maps.
func NewTrieNode() *TrieNode {
	node := new(TrieNode)
	node.to = map[string]*TrieNode{}
	node.values = map[string]bool{}
	return node
}
// Add inserts val into the trie, creating intermediate nodes as needed.
// The full token is recorded at the node where its last rune ends.
func (t *Trie) Add(val string) {
	cur := t.Root
	for _, r := range val {
		key := string(r)
		next := cur.to[key]
		if next == nil {
			next = NewTrieNode()
			cur.to[key] = next
		}
		cur = next
	}
	cur.values[val] = true
}
// FindLongest walks the trie along key and returns the longest prefix of
// key that is a complete token, or the empty string if none matches.
func (t *Trie) FindLongest(key []rune) string {
	node := t.Root
	longest := 0
	for i, r := range key {
		next := node.to[string(r)]
		if next == nil {
			break
		}
		node = next
		// A non-empty value set marks the end of a stored token.
		if len(node.values) > 0 {
			longest = i + 1
		}
	}
	return string(key[:longest])
}
// WorldTokenizer represents a tokenizer for encoding and decoding bytes to tokens
type WorldTokenizer struct {
	// IndexToToken maps a token id to its decoded token string.
	IndexToToken map[int]string
	// TokenToIndex is the inverse mapping, from token string to id.
	TokenToIndex map[string]int
	// Trie indexes all tokens for longest-prefix matching during encoding.
	Trie *Trie
}
// NewWorldTokenizer initializes a new world tokenizer from the embedded
// rwkv_vocab_v20230424.txt vocabulary file.
//
// Each vocabulary line has the form "<index> <token-literal> <byte-length>".
// Tokens whose parsed length disagrees with the declared length and contain
// invalid UTF-8 are counted and reported once after loading; they are still
// registered in the maps and the trie.
func NewWorldTokenizer() (*WorldTokenizer, error) {
	f, err := worldTokenizerFS.Open("rwkv_vocab_v20230424.txt")
	if err != nil {
		return nil, err
	}
	defer f.Close()
	wt := &WorldTokenizer{
		IndexToToken: make(map[int]string),
		TokenToIndex: make(map[string]int),
		Trie:         &Trie{Root: NewTrieNode()},
	}
	scanner := bufio.NewScanner(f)
	nonStandardToken := 0
	for scanner.Scan() {
		line := scanner.Text()
		fIndex := strings.Index(line, " ")
		lIndex := strings.LastIndex(line, " ")
		// Guard against malformed lines: without two distinct spaces the
		// slice expressions below would panic with an out-of-range error.
		if fIndex < 0 || lIndex <= fIndex {
			return nil, fmt.Errorf("malformed vocabulary line: %q", line)
		}
		index, err := strconv.Atoi(line[:fIndex])
		if err != nil {
			return nil, err
		}
		expectLen, err := strconv.Atoi(line[lIndex+1:])
		if err != nil {
			return nil, err
		}
		rest := line[fIndex+1 : lIndex]
		token, err := parseBytes(rest)
		if err != nil {
			return nil, err
		}
		if expectLen != len(token) {
			// Some vocabulary entries are raw byte fragments rather than
			// valid UTF-8; count them so we can report once after the loop.
			if strings.ContainsRune(token, utf8.RuneError) {
				nonStandardToken++
			}
		}
		wt.IndexToToken[index] = token
		wt.TokenToIndex[token] = index
		wt.Trie.Add(token)
	}
	if nonStandardToken > 0 {
		log.Print("rwkv_vocab_v20230424.txt contains non-standard utf-8 total: ",
			nonStandardToken,
			". they will not affect your normal use")
	}
	if err := scanner.Err(); err != nil {
		return nil, err
	}
	return wt, nil
}
// EncodeBytes encodes a rune slice into token ids by repeatedly taking the
// longest vocabulary match at the current position. It returns an error if
// no token matches at some position.
func (wt *WorldTokenizer) EncodeBytes(src []rune) ([]int, error) {
	var tokens []int
	for pos := 0; pos < len(src); {
		match := wt.Trie.FindLongest(src[pos:])
		if match == "" {
			return nil, fmt.Errorf("can't encode current language: %s", string(src[pos:]))
		}
		tokens = append(tokens, wt.TokenToIndex[match])
		pos += len([]rune(match))
	}
	return tokens, nil
}
// DecodeBytes maps each token id back to its stored string and concatenates
// the results as runes. Each token is converted to runes individually, so
// invalid-UTF-8 byte fragments stay as separate replacement runes.
func (wt *WorldTokenizer) DecodeBytes(tokens []int) []rune {
	var out []rune
	for _, id := range tokens {
		piece := wt.IndexToToken[id]
		out = append(out, []rune(piece)...)
	}
	return out
}
// Encode converts src to runes and encodes them into token ids.
func (wt *WorldTokenizer) Encode(src string) ([]int, error) {
	runes := []rune(src)
	return wt.EncodeBytes(runes)
}
// Decode turns token ids back into the decoded string.
func (wt *WorldTokenizer) Decode(tokens []int) string {
	runes := wt.DecodeBytes(tokens)
	return string(runes)
}
// parseBytes converts a vocabulary token literal into its string form.
// It accepts three shapes:
//   - b'...' : a Python bytes literal; the leading 'b' is stripped and the
//     remainder is handled like a single-quoted literal.
//   - '...'  : a single-quoted literal; escapes are decoded Python-style
//     via encodeUtf8, then escaping backslashes before quotes are dropped.
//   - "..."  : a double-quoted literal; only doubled backslashes collapse.
//
// Anything else is returned unchanged; an empty literal is an error.
func parseBytes(s string) (string, error) {
	if strings.HasPrefix(s, "b'") && strings.HasSuffix(s, "'") && len(s) > 3 {
		// handle b'...': drop the leading 'b' and fall through to '...'
		s = s[1:]
	}
	if len(s) <= 0 {
		return "", errors.New("rwkv_vocab_v20230424.txt vocab list broke, got vocab length equal zero")
	}
	// handle '...'. A lone quote character would satisfy both HasPrefix and
	// HasSuffix and make s[1:len(s)-1] panic, so require at least two chars.
	if len(s) >= 2 && strings.HasPrefix(s, "'") && strings.HasSuffix(s, "'") {
		b := encodeUtf8(s[1 : len(s)-1])
		b = ignoreBackslashes(b, false)
		return b, nil
	}
	// handle "..."
	if len(s) >= 2 && strings.HasPrefix(s, "\"") && strings.HasSuffix(s, "\"") {
		b := ignoreBackslashes(s[1:len(s)-1], true)
		return b, nil
	}
	// not a quoted literal; return as-is
	return s, nil
}
// encodeUtf8 decodes Python-style escape sequences in the input string.
// Single-byte escapes (e.g. \x41, \n) are appended as raw bytes; multibyte
// escapes (e.g. \u00e9) are appended UTF-8 encoded. If any escape fails to
// parse, the input is returned unchanged.
func encodeUtf8(in string) string {
	const backslash = '\\'
	original := in
	// Pre-size the output to limit reallocation while decoding.
	out := make([]byte, 0, (len(in)+1)*3/2)
	for len(in) > 0 {
		r, multibyte, rest, err := strconv.UnquoteChar(in, backslash)
		if err != nil {
			// Unparseable escape: fall back to the untouched input.
			return original
		}
		in = rest
		if multibyte && r >= utf8.RuneSelf {
			var tmp [utf8.UTFMax]byte
			n := utf8.EncodeRune(tmp[:], r)
			out = append(out, tmp[:n]...)
		} else {
			out = append(out, byte(r))
		}
	}
	return string(out)
}
// ignoreBackslashes drops an escaping backslash, Python-style: when
// doubleQuotes is true a backslash is removed only if it precedes another
// backslash; otherwise only if it precedes a single quote. All other bytes,
// including a trailing backslash, are copied through unchanged.
func ignoreBackslashes(in string, doubleQuotes bool) string {
	// Pick the byte whose preceding backslash should be swallowed.
	target := byte('\'')
	if doubleQuotes {
		target = '\\'
	}
	out := make([]byte, 0, (len(in)+1)*3/2)
	for i := 0; i < len(in); i++ {
		c := in[i]
		if c == '\\' && i+1 < len(in) && in[i+1] == target {
			continue // skip the escaping backslash, keep what follows
		}
		out = append(out, c)
	}
	return string(out)
}