diff --git a/xtuner_config/llama3_8b_instruct_qlora_alpaca_e3_M_ruozhi_scM.py b/xtuner_config/llama3_8b_instruct_qlora_alpaca_e3_M_ruozhi_scM.py
index 3c21a00..5cb1a26 100644
--- a/xtuner_config/llama3_8b_instruct_qlora_alpaca_e3_M_ruozhi_scM.py
+++ b/xtuner_config/llama3_8b_instruct_qlora_alpaca_e3_M_ruozhi_scM.py
@@ -28,7 +28,7 @@ pretrained_model_name_or_path = '/root/share/new_models/meta-llama/Meta-Llama-3-
 use_varlen_attn = True # new

 # Data
-data_path = '/root/StableCascade/emollm2/EmoLLM/datasets/processed/combined_sc_ruozhi.jsonl'
+data_path = '../datasets/processed/combined_sc_ruozhi.jsonl'
 prompt_template = PROMPT_TEMPLATE.llama3_chat
 max_length = 4096
 pack_to_max_length = True
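Note on the path change: the new `data_path` is relative, so it is resolved against the working directory at launch time rather than a hard-coded machine-specific location. Below is a minimal, illustrative sanity-check sketch (not part of the diff, and not the xtuner loading code itself) that assumes training is launched from inside `xtuner_config/`, so that `../datasets/processed/combined_sc_ruozhi.jsonl` resolves to the repository's dataset file.

```python
from pathlib import Path

# Illustrative check only: with the relative data_path in the config above,
# the dataset file must be reachable from the launch directory (e.g. run the
# training command from inside xtuner_config/), otherwise data loading fails.
data_path = Path('../datasets/processed/combined_sc_ruozhi.jsonl')
if not data_path.exists():
    raise FileNotFoundError(f'dataset not found at {data_path.resolve()}')
```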