Due to the size of the data, you need to download and unzip the data file data.zip first.
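A minimal sketch of that step, assuming the archive unpacks into the `data/` directory that the commands below reference (the exact layout of data.zip may differ):

```shell
# After downloading data.zip (assumed to contain the data/ directory):
unzip data.zip
ls data   # expect files such as CoDeX-S-train.json and CoDeX-S-rotate.pth
```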
- run KoPA tuning

```shell
export WANDB_DISABLED=true
wandb offline
# For CoDeX-S dataset
CUDA_VISIBLE_DEVICES=0 nohup python finetune_kopa.py \
    --base_model 'YOUR LLM PATH' \
    --data_path 'data/CoDeX-S-train.json' \
    --output_dir 'YOUR SAVE PATH' \
    --num_epochs 3 \
    --lora_r 64 \
    --learning_rate 3e-4 \
    --batch_size 12 \
    --micro_batch_size 12 \
    --num_prefix 1 \
    --kge_model 'data/CoDeX-S-rotate.pth' \
    --lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' > log.txt &

# For FB15K-237N dataset
export WANDB_DISABLED=true
wandb offline
CUDA_VISIBLE_DEVICES=0 nohup python finetune_kopa.py \
    --base_model 'YOUR LLM PATH' \
    --data_path 'data/FB15K-237N-train.json' \
    --output_dir 'YOUR SAVE PATH' \
    --num_epochs 3 \
    --lora_r 64 \
    --learning_rate 3e-4 \
    --batch_size 12 \
    --micro_batch_size 12 \
    --num_prefix 1 \
    --kge_model 'data/FB15K-237N-rotate.pth' \
    --lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' > log.txt &
```
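Each command is launched in the background via `nohup ... &`, with its output redirected to log.txt. A quick way to check on a run, using standard shell tooling rather than anything specific to this repo:

```shell
tail -f log.txt              # follow training output as it is written
ps aux | grep finetune_kopa  # confirm the background process is still alive
```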
You need to fill in the LLM path (`--base_model`) and save path (`--output_dir`) before running. The hyper-parameters can be tuned to fit your hardware and needs.
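As a concrete illustration, here is the CoDeX-S command with hypothetical values filled in. The paths `models/llama-7b-hf` and `checkpoints/kopa-codex-s` are placeholders, not paths shipped with the repo, and the lowered LoRA rank is only an example of a hyper-parameter change:

```shell
# Hypothetical paths -- substitute your own LLM checkpoint and save directory.
# --lora_r is lowered from 64 to 32 here purely to illustrate tuning.
CUDA_VISIBLE_DEVICES=0 nohup python finetune_kopa.py \
    --base_model 'models/llama-7b-hf' \
    --data_path 'data/CoDeX-S-train.json' \
    --output_dir 'checkpoints/kopa-codex-s' \
    --num_epochs 3 \
    --lora_r 32 \
    --learning_rate 3e-4 \
    --batch_size 12 \
    --micro_batch_size 12 \
    --num_prefix 1 \
    --kge_model 'data/CoDeX-S-rotate.pth' \
    --lora_target_modules='[q_proj,k_proj,v_proj,o_proj]' > log.txt &
```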