From 342f349dd9cd484d7653eb26f2012b6f5d25daba Mon Sep 17 00:00:00 2001
From: Zhang-Each <740969824@qq.com>
Date: Tue, 26 Dec 2023 20:43:56 +0800
Subject: [PATCH] update README

---
 README.md | 35 ++++++++++++++++++-----------------
 1 file changed, 18 insertions(+), 17 deletions(-)

diff --git a/README.md b/README.md
index 4503c04..ca67692 100644
--- a/README.md
+++ b/README.md
@@ -29,23 +29,6 @@ Due to the size of the data, you need to download and unzip the data file data.z
 - run KoPA tuning
 
 ```shell
-export WANDB_DISABLED=true
-wandb offline
-# For CoDeX-S dataset
-CUDA_VISIBLE_DEVICES=0 nohup python finetune_kopa.py \
-    --base_model 'YOUR LLM PATH' \
-    --data_path 'data/CoDeX-S-train.json' \
-    --output_dir 'YOUR SAVE PATH' \
-    --num_epochs 3 \
-    --lora_r 64 \
-    --learning_rate 3e-4 \
-    --batch_size 12 \
-    --micro_batch_size 12 \
-    --num_prefix 1 \
-    --kge_model 'data/CoDeX-S-rotate.pth' \
-    --lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' > log.txt &
-
-
 # For CoDeX-S dataset
 export WANDB_DISABLED=true
 wandb offline
@@ -61,6 +44,24 @@ CUDA_VISIBLE_DEVICES=0 nohup python finetune_kopa.py \
     --num_prefix 1 \
     --kge_model 'data/CoDeX-S-rotate.pth' \
     --lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' > log.txt &
+
+
+# For FB15K-237N dataset
+export WANDB_DISABLED=true
+wandb offline
+CUDA_VISIBLE_DEVICES=0 nohup python finetune_kopa.py \
+    --base_model 'YOUR LLM PATH' \
+    --data_path 'data/FB15K-237N-train.json' \
+    --output_dir 'YOUR SAVE PATH' \
+    --num_epochs 3 \
+    --lora_r 64 \
+    --learning_rate 3e-4 \
+    --batch_size 12 \
+    --micro_batch_size 12 \
+    --num_prefix 1 \
+    --kge_model 'data/FB15K-237N-rotate.pth' \
+    --lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' > log.txt &
+
 ```
 
 You may need to fill the LLM path and save path before running. The hyper-parameters can be tuned by yourself.