fix: adapt to the new topology

Signed-off-by: DeEMO <yzzxrx@gmail.com>
This commit is contained in:
DeEMO 2025-05-16 17:03:36 +08:00
parent df16b1876f
commit 8b61cb2fa4
3 changed files with 18 additions and 3 deletions

View file

@@ -143,8 +143,8 @@ int main(int argc, char ** argv) {
return 1;
}
-const uint32_t n_world = params.n_world;
-const uint32_t my_rank = params.rank;
+uint32_t n_world = params.n_world;
+uint32_t my_rank = params.rank;
GGML_ASSERT(!(n_world == 1 && my_rank > 0));
// check if --n-layer-window and --world is matched
@@ -200,6 +200,9 @@ int main(int argc, char ** argv) {
// load the model and apply lora adapter, if any
LOG_INF("%s: load the model and apply lora adapter, if any\n", __func__);
llama_init_result llama_init = llama_init_from_gpt_params(params);
+// update
+my_rank = params.rank;
+n_world = params.n_world;
model = llama_init.model;
ctx = llama_init.context;