Mirror of https://git.adityakumar.xyz/llama.cpp.git (synced 2024-11-09 15:29:43 +00:00)
Fix UTF-8 handling (including colors) (#79)
commit 2a20f48efa
parent d1f224712d
2 changed files with 27 additions and 11 deletions
29 convert-pth-to-ggml.py

@@ -22,7 +22,6 @@ import json
 import struct
 import numpy as np
 import torch
 
 from sentencepiece import SentencePieceProcessor
 
 if len(sys.argv) < 3:
@@ -101,12 +100,28 @@ for p in range(n_parts):
 
     # Is this correct??
     for i in range(32000):
-        # TODO: this is probably wrong - not sure how this tokenizer works
-        text = tokenizer.decode([29889, i]).encode('utf-8')
-        # remove the first byte (it's always '.')
-        text = text[1:]
-        fout.write(struct.pack("i", len(text)))
-        fout.write(text)
+        if tokenizer.is_unknown(i):
+            # "<unk>" token (translated as ??)
+            text = " \u2047 ".encode("utf-8")
+            fout.write(struct.pack("i", len(text)))
+            fout.write(text)
+        elif tokenizer.is_control(i):
+            # "<s>"/"</s>" tokens
+            fout.write(struct.pack("i", 0))
+        elif tokenizer.is_byte(i):
+            # "<U+XX>" tokens (which may be invalid UTF-8)
+            piece = tokenizer.id_to_piece(i)
+            if len(piece) != 6:
+                print("Invalid token: " + piece)
+                sys.exit(1)
+            byte_value = int(piece[3:-1], 16)
+            fout.write(struct.pack("i", 1))
+            fout.write(struct.pack("B", byte_value))
+        else:
+            # normal token. Uses U+2581 (LOWER ONE EIGHTH BLOCK) to represent spaces.
+            text = tokenizer.id_to_piece(i).replace("\u2581", " ").encode("utf-8")
+            fout.write(struct.pack("i", len(text)))
+            fout.write(text)
 
     for k, v in model.items():
         name = k
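Each vocab entry written by this loop is an int length prefix (struct.pack("i", ...)) followed by the token bytes; byte tokens instead write a length of 1 and the raw byte value. The sketch below pulls that classification logic out of the converter for standalone inspection. It is not part of the commit: it assumes a SentencePiece model file is available locally, and the file name and example token id are placeholders.

# Standalone sketch of the new token classification (assumption: a local
# "tokenizer.model" file; the path and the example id are illustrative).
import struct
from sentencepiece import SentencePieceProcessor

tokenizer = SentencePieceProcessor()
tokenizer.Load("tokenizer.model")  # hypothetical path to the LLaMA tokenizer

def vocab_record(i: int) -> bytes:
    # Return the bytes the converter writes for token id i.
    if tokenizer.is_unknown(i):
        text = " \u2047 ".encode("utf-8")             # "<unk>" rendered as ??
    elif tokenizer.is_control(i):
        text = b""                                    # "<s>"/"</s>": no text
    elif tokenizer.is_byte(i):
        piece = tokenizer.id_to_piece(i)              # e.g. "<0x0A>"
        byte_value = int(piece[3:-1], 16)             # hex digits between "<0x" and ">"
        return struct.pack("i", 1) + struct.pack("B", byte_value)
    else:
        # U+2581 (LOWER ONE EIGHTH BLOCK) marks word-initial spaces
        text = tokenizer.id_to_piece(i).replace("\u2581", " ").encode("utf-8")
    return struct.pack("i", len(text)) + text

print(vocab_record(29889))  # id 29889 is the "." token used by the removed decode hack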
9 main.cpp
@@ -939,6 +939,11 @@ int main(int argc, char ** argv) {
                     break;
                 }
             }
+
+            // reset color to default if we there is no pending user input
+            if (!input_noecho && params.use_color && embd_inp.size() == input_consumed) {
+                printf(ANSI_COLOR_RESET);
+            }
         }
 
         // display text
@@ -946,10 +951,6 @@ int main(int argc, char ** argv) {
             for (auto id : embd) {
                 printf("%s", vocab.id_to_token[id].c_str());
             }
-            // reset color to default if we there is no pending user input
-            if (params.use_color && embd_inp.size() <= input_consumed) {
-                printf(ANSI_COLOR_RESET);
-            }
             fflush(stdout);
         }
 
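The net effect of these two hunks is that the terminal color is now reset before the newly sampled tokens are printed (once all pending user input has been consumed), rather than after the display loop, so model output is no longer drawn in the user-input color. Below is a small Python sketch of that ordering; it assumes ANSI_COLOR_RESET is the standard reset escape "\x1b[0m" (the macro's value is not shown in this diff), and all variable values are made up.

# Sketch of the new ordering (assumptions: ANSI_COLOR_RESET is the standard
# "\x1b[0m" reset escape; the state values below are made up).
import sys

ANSI_COLOR_RESET = "\x1b[0m"

use_color = True
input_noecho = False                  # sampled tokens should be echoed
embd_inp = ["Once", " upon", " a"]    # prompt/user tokens
input_consumed = len(embd_inp)        # everything already forwarded
embd = [" time"]                      # freshly sampled tokens

# reset color to default if there is no pending user input
# (mirrors the block added around line 939)
if not input_noecho and use_color and len(embd_inp) == input_consumed:
    sys.stdout.write(ANSI_COLOR_RESET)

# display text (mirrors the loop around line 946, now without a trailing reset)
if not input_noecho:
    for tok in embd:
        sys.stdout.write(tok)
    sys.stdout.flush()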