You cannot select more than 25 topics. Topics must start with a letter or number, can include dashes ('-') and can be up to 35 characters long.

config.yaml 1.2KB

3 months ago
1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162
  1. shared:
  2. project_name: lowdim_prompts
  3. use_tqdm: true
  4. random_seed: 42
  5. default: &default
  6. model_name: google/t5-large-lm-adapt
  7. wandb_name: null
  8. train_batch_size: 32
  9. valid_batch_size: 32
  10. num_epochs: 200
  11. peft_params: null # no mutation
  12. hot_modules: null # fine-tune all
  13. balancify_train: false
  14. best_finder:
  15. save: true
  16. metric: valid_f1-score-ma
  17. higher_better: true
  18. tasks:
  19. - glue:cola
  20. run_configs:
  21. # - <<: *default
  22. # wandb_name: n_tokens100_n_comb_tokens512
  23. # learning_rate: 0.01
  24. # hot_modules:
  25. # - sadcl
  26. # peft_params:
  27. # kind: comb_prompt
  28. # n_tokens: 100
  29. # n_comb_tokens: 512
  30. # - <<: *default
  31. # wandb_name: n_tokens100_n_comb_tokens2048
  32. # learning_rate: 0.01
  33. # hot_modules:
  34. # - sadcl
  35. # peft_params:
  36. # kind: comb_prompt
  37. # n_tokens: 100
  38. # n_comb_tokens: 2048
  39. - <<: *default
  40. wandb_name: large_n_tokens100_64_256
  41. learning_rate: 0.01
  42. hot_modules:
  43. - sadcl
  44. peft_params:
  45. kind: lowdim_prompt
  46. n_tokens: 100
  47. dims:
  48. - 64
  49. - 256
  50. - <<: *default
  51. wandb_name: large_n_tokens100_256_512
  52. learning_rate: 0.01
  53. hot_modules:
  54. - sadcl
  55. peft_params:
  56. kind: lowdim_prompt
  57. n_tokens: 100
  58. dims:
  59. - 256
  60. - 512