mirror of
https://git.adityakumar.xyz/llama.cpp.git
synced 2024-11-09 15:29:43 +00:00
Temporarily bump the memory buffer size - hopefully fixes issues from 483bab2e
This commit is contained in:
parent
f4f5362edb
commit
31572d9665
1 changed file with 1 addition and 1 deletion
|
@ -632,7 +632,7 @@ static bool llama_eval_internal(
|
|||
auto & mem_per_token = lctx.mem_per_token;
|
||||
|
||||
// TODO: fix this hardcoded size
|
||||
static size_t buf_size = 512u*1024*1024;
|
||||
static size_t buf_size = 2048u*1024*1024; // TMP !!!
|
||||
static void * buf = malloc(buf_size);
|
||||
|
||||
if (mem_per_token > 0 && mem_per_token*N > buf_size) {
|
||||
|
|
Loading…
Reference in a new issue