From b92dcd5ee3ed8e5907f4ba2e6e6c07e60bdff43f Mon Sep 17 00:00:00 2001
From: zhangbo9674
Date: Wed, 16 Oct 2024 12:17:44 +0000
Subject: [PATCH] fix

---
 .../llama2/pretrain_config_llama2_13b/pretrain-llama2_13b.json | 2 +-
 .../llama2/pretrain_config_llama2_7b/pretrain-llama2_7b.json   | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_13b/pretrain-llama2_13b.json b/tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_13b/pretrain-llama2_13b.json
index b8d74ec96a21..57bebf86696b 100644
--- a/tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_13b/pretrain-llama2_13b.json
+++ b/tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_13b/pretrain-llama2_13b.json
@@ -8,7 +8,7 @@
     "per_device_eval_batch_size": 4,
     "tensor_parallel_degree": 1,
     "pipeline_parallel_degree": 4,
-    "sharding": "stage2",
+    "sharding": "stage1",
     "data_parallel_config": "enable_allreduce_avg_in_gradinent_scale gradient_sync_after_accumulate",
     "sharding_parallel_config": "enable_stage2_overlap",
     "tensor_parallel_config": "enable_mp_async_allreduce",
diff --git a/tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_7b/pretrain-llama2_7b.json b/tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_7b/pretrain-llama2_7b.json
index ae1e3012274d..6b89e3fd1fe4 100644
--- a/tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_7b/pretrain-llama2_7b.json
+++ b/tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_7b/pretrain-llama2_7b.json
@@ -8,7 +8,7 @@
     "per_device_eval_batch_size": 2,
     "tensor_parallel_degree": 1,
     "pipeline_parallel_degree": 1,
-    "sharding": "stage2",
+    "sharding": "stage1",
     "data_parallel_config": "enable_allreduce_avg_in_gradinent_scale gradient_sync_after_accumulate",
     "sharding_parallel_config": "enable_stage2_overlap",
     "tensor_parallel_config": "enable_mp_async_allreduce",
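
Note (not part of the patch): the two hunks above only switch "sharding" from "stage2" to "stage1" in the llama2 auto-parallel pretrain test configs; the surrounding keys are unchanged context lines. Below is a minimal sketch, not from the patch, for eyeballing the parallelism-related fields of the edited configs after applying it. It assumes the script is run from the PaddleNLP repository root so the paths taken from the diff resolve.

    # Print the parallelism-related settings of the two test configs touched by
    # this patch, so the sharding stage and related flags are easy to review.
    import json

    CONFIGS = [
        "tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_13b/pretrain-llama2_13b.json",
        "tests/test_tipc/static/auto_parallel/llama2/pretrain_config_llama2_7b/pretrain-llama2_7b.json",
    ]

    for path in CONFIGS:
        with open(path) as f:
            cfg = json.load(f)
        print(path)
        for key in (
            "sharding",
            "sharding_parallel_config",
            "tensor_parallel_degree",
            "pipeline_parallel_degree",
        ):
            # Keys absent from a config are reported as None rather than raising.
            print(f"  {key}: {cfg.get(key)}")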