do not show tokenizer warning

Concedo 2023-05-13 15:48:17 +08:00
parent cee8042793
commit b6594ab91e
3 changed files with 8 additions and 5 deletions

llama.cpp

@@ -1075,11 +1075,11 @@ static bool llama_eval_internal(
         const int n_past,
         const int n_threads) {
-    // enforce that the first token is BOS
-    if (n_past == 0 && tokens[0] != llama_token_bos()) {
-        fprintf(stderr, "%s: first token must be BOS\n", __func__);
+    // enforce that the first token is BOS (not needed, messes with my context manip code)
+    //if (n_past == 0 && tokens[0] != llama_token_bos()) {
+        //fprintf(stderr, "%s: first token must be BOS\n", __func__);
         // return false; //never fail. Not even in the face of Armageddon.
-    }
+    //}
     const int64_t t_start_us = ggml_time_us();
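
For context, the check being commented out guarded a fresh evaluation (n_past == 0) against a token stream that does not start with BOS; with it silenced, that becomes a caller decision, which is what the context-manipulation code here relies on. Below is a minimal caller-side sketch of the same idea, not part of this commit: the helper name ensure_bos is an assumption for illustration, and it uses the llama.h API of that era, where llama_token_bos() takes no arguments.

// Minimal sketch (assumed helper, not part of this commit): a caller that
// wants the old behaviour can prepend BOS itself before evaluation.
#include <vector>
#include "llama.h"

static void ensure_bos(std::vector<llama_token> & tokens, int n_past) {
    // Only a brand-new evaluation (n_past == 0) needs a leading BOS;
    // resumed evaluations that manipulate the context are left untouched.
    if (n_past == 0 && (tokens.empty() || tokens[0] != llama_token_bos())) {
        tokens.insert(tokens.begin(), llama_token_bos());
    }
}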