
Commit: fix config
mikecovlee committed Jan 6, 2024
1 parent de52c99 · commit c628685
Showing 3 changed files with 14 additions and 21 deletions.
config/alpaca.json (10 changes: 4 additions & 6 deletions)
@@ -11,16 +11,15 @@
     "lora": [
         {
             "name": "lora_0",
-            "output": "lora_0",
             "optim": "adamw",
             "lr": 3e-4,
             "batch_size": 16,
             "micro_batch_size": 4,
             "test_batch_size": 64,
             "num_epochs": 3,
             "r": 8,
-            "alpha": 16,
-            "dropout": 0.05,
+            "lora_alpha": 16,
+            "lora_dropout": 0.05,
             "target_modules": {
                 "q_proj": true,
                 "k_proj": false,
@@ -36,15 +35,14 @@
         {
             "name": "lora_1",
-            "output": "lora_1",
             "optim": "adamw",
             "lr": 3e-4,
             "batch_size": 16,
             "micro_batch_size": 4,
             "test_batch_size": 64,
             "num_epochs": 3,
             "r": 8,
-            "alpha": 16,
-            "dropout": 0.05,
+            "lora_alpha": 16,
+            "lora_dropout": 0.05,
             "target_modules": {
                 "q_proj": true,
                 "k_proj": false,
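The change is the same for both adapters in this file: the per-adapter "output" field is dropped, and "alpha"/"dropout" are renamed to "lora_alpha"/"lora_dropout". For anyone carrying a pre-commit config, a minimal migration sketch could look like the following. This helper is hypothetical, not part of this commit, and assumes only the renames visible in these diffs:

```python
import json

# Renames introduced by this commit, as seen in the diffs.
RENAMES = {"alpha": "lora_alpha", "dropout": "lora_dropout"}
# Field removed by this commit.
REMOVED = ("output",)

def migrate_config(path: str) -> None:
    """Rewrite an old-style config file in place with the new key names."""
    with open(path) as f:
        config = json.load(f)
    for adapter in config.get("lora", []):
        for old, new in RENAMES.items():
            if old in adapter:
                adapter[new] = adapter.pop(old)
        for key in REMOVED:
            adapter.pop(key, None)  # silently drop the retired field
    with open(path, "w") as f:
        json.dump(config, f, indent=4)

migrate_config("config/alpaca.json")
```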
config/dummy.json (10 changes: 4 additions & 6 deletions)
@@ -8,7 +8,6 @@
     "lora": [
         {
             "name": "lora_0",
-            "output": "lora_0",
             "optim": "adamw",
             "lr": 3e-4,
             "momentum": 0.9,
@@ -17,8 +16,8 @@
             "test_batch_size": 64,
             "num_epochs": 50,
             "r": 8,
-            "alpha": 16,
-            "dropout": 0.05,
+            "lora_alpha": 16,
+            "lora_dropout": 0.05,
             "target_modules": {
                 "q_proj": true,
                 "k_proj": true,
@@ -36,16 +35,15 @@
         },
         {
             "name": "lora_1",
-            "output": "lora_1",
             "optim": "adamw",
             "lr": 3e-4,
             "batch_size": 64,
             "micro_batch_size": 64,
             "test_batch_size": 64,
             "num_epochs": 50,
             "r": 32,
-            "alpha": 64,
-            "dropout": 0.05,
+            "lora_alpha": 64,
+            "lora_dropout": 0.05,
             "target_modules": {
                 "q_proj": true,
                 "k_proj": true,
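config/dummy.json follows the same rename pattern. A quick way to confirm that a config matches the post-commit schema is a check like the sketch below; it is illustrative only and asserts just the keys visible in these diffs, so fields outside the diffs are not assumed:

```python
import json

# Keys every adapter entry carries after this commit, per the diffs above.
EXPECTED = {"name", "optim", "lr", "num_epochs", "r",
            "lora_alpha", "lora_dropout", "target_modules"}
# Pre-commit keys that should no longer appear.
LEGACY = {"alpha", "dropout", "output"}

def check(path: str) -> None:
    """Assert each adapter uses the renamed keys and none of the old ones."""
    with open(path) as f:
        config = json.load(f)
    for adapter in config["lora"]:
        missing = EXPECTED - adapter.keys()
        stale = LEGACY & adapter.keys()
        assert not missing, f"{adapter['name']}: missing {missing}"
        assert not stale, f"{adapter['name']}: legacy keys {stale}"

for name in ("alpaca", "dummy", "finetune"):
    check(f"config/{name}.json")
```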
config/finetune.json (15 changes: 6 additions & 9 deletions)
@@ -8,7 +8,6 @@
     "lora": [
         {
             "name": "lora_0",
-            "output": "lora_0",
             "optim": "sgd",
             "lr": 1e-2,
             "momentum": 0.9,
@@ -17,8 +16,8 @@
             "test_batch_size": 64,
             "num_epochs": 3,
             "r": 8,
-            "alpha": 16,
-            "dropout": 0.05,
+            "lora_alpha": 16,
+            "lora_dropout": 0.05,
             "target_modules": {
                 "q_proj": true,
                 "k_proj": true,
@@ -36,16 +35,15 @@
         },
         {
             "name": "lora_1",
-            "output": "lora_1",
             "optim": "adamw",
             "lr": 3e-4,
             "batch_size": 16,
             "micro_batch_size": 4,
             "test_batch_size": 64,
             "num_epochs": 3,
             "r": 8,
-            "alpha": 16,
-            "dropout": 0.05,
+            "lora_alpha": 16,
+            "lora_dropout": 0.05,
             "target_modules": {
                 "q_proj": true,
                 "k_proj": true,
@@ -63,16 +61,15 @@
         },
         {
             "name": "lora_2",
-            "output": "lora_2",
             "optim": "adamw",
             "lr": 3e-4,
             "batch_size": 16,
             "micro_batch_size": 4,
             "test_batch_size": 64,
             "num_epochs": 3,
             "r": 16,
-            "alpha": 16,
-            "dropout": 0.05,
+            "lora_alpha": 16,
+            "lora_dropout": 0.05,
             "target_modules": {
                 "q_proj": true,
                 "k_proj": true,
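config/finetune.json mixes optimizers across its three adapters: lora_0 trains with "optim": "sgd", "lr": 1e-2, and "momentum": 0.9, while lora_1 and lora_2 use "optim": "adamw" with "lr": 3e-4. As an illustration of how those per-adapter fields could map onto torch optimizers (the repository's actual wiring is not shown in this commit), consider:

```python
import torch

def build_optimizer(params, adapter: dict) -> torch.optim.Optimizer:
    """Create the optimizer an adapter entry asks for."""
    if adapter["optim"] == "sgd":
        # lora_0: "optim": "sgd", "lr": 1e-2, "momentum": 0.9
        return torch.optim.SGD(params, lr=adapter["lr"],
                               momentum=adapter.get("momentum", 0.0))
    if adapter["optim"] == "adamw":
        # lora_1 / lora_2: "optim": "adamw", "lr": 3e-4
        return torch.optim.AdamW(params, lr=adapter["lr"])
    raise ValueError(f"unknown optimizer: {adapter['optim']}")
```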
