default.yaml
model:
  name: "unsloth/Llama-3.2-1B"   # Hugging Face model identifier
  dtype: "bfloat16"              # load weights in bfloat16

calib:
  data_path: "./data/calib/refinedweb"
  dataset: "refinedweb"
  num_samples: 256   # calibration samples drawn from the dataset
  seq_len: 1024      # tokens per calibration sample
  batch_size: 8
  seed: 2023

profiling:
  # Linear submodules profiled inside each transformer block.
  module_names:
    - "self_attn.q_proj"
    - "self_attn.k_proj"
    - "self_attn.v_proj"
    - "self_attn.o_proj"
    - "mlp.gate_proj"
    - "mlp.up_proj"
    - "mlp.down_proj"
  # Per-layer compression-ratio candidates and ks ratios to sweep.
  cr_candidates: [0.05, 0.1, 0.15, 0.2, 0.25, 0.3, 0.35, 0.4, 0.45, 0.5, 0.55, 0.6, 0.65, 0.7]
  ks_ratios: [1.0, 1.1, 1.2, 1.3, 1.4, 1.5, 1.6, 1.7, 1.8, 1.9, 2.0, 2.1, 2.2, 2.3, 2.4, 2.5, 2.6, 2.7, 2.8, 2.9, 3.0]
  profile_cache: "./layer_prof_llama3_1b_v2.json"   # cached per-layer profiling results
  cr_cache: "./cr_layer_prof_llama3_1b_v4.json"     # cached compression-ratio profiles
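The two sweep lists above define the profiling search space. As a back-of-envelope check built only from this file's values (it is an assumption, not confirmed here, that every (module, cr, ks) triple is profiled independently in each block):

```python
# Size of the profiling grid implied by the lists above.
cr_candidates = 14      # entries in cr_candidates
ks_ratios = 21          # entries in ks_ratios
modules_per_block = 7   # entries in module_names
num_blocks = 16         # Llama-3.2-1B has 16 decoder blocks

per_block = modules_per_block * cr_candidates * ks_ratios  # 2058 combinations
total = per_block * num_blocks                             # 32928 across the model
print(per_block, total)
```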
compression:
  target_kept_ratio: 0.20    # overall fraction of parameters to keep
  param_precision: 50000
  dobi_like: false
  output_dir: "./llama3_1b_swift_svd_20_new"
  adam_refine_steps: 100     # Adam refinement steps after decomposition
  method: dijekstra          # per-layer ratio-allocation search strategy
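On `target_kept_ratio`: in SVD-based compression, keeping a fraction r of an m×n weight matrix's parameters corresponds to truncating to rank k ≈ r·mn/(m+n), since a rank-k factorization stores k·(m+n) values. A minimal sketch of that standard relation (it is an assumption, not confirmed by this file, that the repo maps the ratio to rank this way):

```python
def rank_for_kept_ratio(m: int, n: int, kept_ratio: float) -> int:
    """Largest rank k whose factorization U (m x k) @ V (k x n)
    stays within kept_ratio * m * n parameters."""
    return max(1, int(kept_ratio * m * n / (m + n)))

# Example: a 2048 x 2048 projection at the 0.20 kept ratio above -> rank 204.
print(rank_for_kept_ratio(2048, 2048, 0.20))
```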
evaluation:
  tasks: ["wikitext"]   # alternatives: ["mmlu", "arc_easy", "arc_challenge", "hellaswag", "piqa", "lambada_openai", "race", "sciq"]
  batch_size: 4
  max_batch_size: 16
  device: "cuda"

evaluation_ppl:
  dataset_name: "c4"    # dataset for perplexity evaluation
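Finally, a minimal sketch of consuming this file with PyYAML (the loading code is illustrative, not taken from this repo):

```python
import yaml

# Parse the config into nested dicts; keys mirror the sections above.
with open("default.yaml") as f:
    cfg = yaml.safe_load(f)

print(cfg["model"]["name"])                     # "unsloth/Llama-3.2-1B"
print(cfg["compression"]["target_kept_ratio"])  # 0.2
print(cfg["evaluation"]["tasks"])               # ["wikitext"]
```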