-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpretrain.py
More file actions
76 lines (57 loc) · 1.96 KB
/
pretrain.py
File metadata and controls
76 lines (57 loc) · 1.96 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
import argparse
import os
from pathlib import Path
import lightning as L
import torch
import yaml
from lightning.pytorch.loggers import WandbLogger
from pytorch_lightning.utilities import rank_zero_only
from digitalcell.conf.utils import load_config
from digitalcell.data.datamodule import HiC_DataModule, HiC_Dataset
from digitalcell.model.hict import HiCT, HiCT_Config
# Trade full fp32 matmul precision for TF32 speed on Tensor Core GPUs;
# 'high' is the recommended setting for mixed-precision-friendly training.
# include when using Tensor Cores
torch.set_float32_matmul_precision('high')
import multiprocessing as mp
# Force 'spawn' for worker processes: 'fork' is unsafe once CUDA has been
# initialized in the parent. force=True overrides any start method that a
# prior import may have already set.
mp.set_start_method('spawn', force=True)
@rank_zero_only
def save_config_file(config: dict) -> None:
    """Dump the full run configuration to ``config.yaml`` inside the
    metadata directory so a run's exact settings can be reproduced later.

    Decorated with ``rank_zero_only`` so only one process writes the file
    in distributed training. No-op when ``metadata_dir`` is ``None``.
    """
    metadata_dir = config['datamodule']['metadata_dir']
    if metadata_dir is None:
        return
    target = Path(metadata_dir)
    target.mkdir(parents=True, exist_ok=True)
    with open(target / 'config.yaml', 'w') as f:
        yaml.dump(config, f, default_flow_style=False)
def main(
    config: dict
) -> None:
    """Run Hi-C foundation-model pretraining from a parsed configuration.

    Args:
        config: Dictionary with 'model', 'dataset', 'datamodule', 'trainer'
            and 'logger' sections, plus an optional top-level 'ckpt_path'
            to resume training from a checkpoint.
    """
    # Persist the config next to the run metadata for reproducibility.
    if config['datamodule']['metadata_dir'] is not None:
        save_config_file(config)

    model_config = HiCT_Config(**config['model'])
    model = HiCT(model_config)
    datamodule = HiC_DataModule(**config['datamodule'], dataset_config=config['dataset'])

    # 'use_wandb' is an on/off switch, not a WandbLogger kwarg: pop it so the
    # remaining logger section can be splatted directly. pop with a default
    # (unlike the previous `del` on a direct key access) also tolerates
    # configs that omit the key entirely instead of raising KeyError.
    if config['logger'].pop('use_wandb', False):
        logger = WandbLogger(**config['logger'])
    else:
        logger = None

    trainer = L.Trainer(
        **config['trainer'],
        logger=logger
    )
    # https://lightning.ai/docs/pytorch/stable/common/trainer.html#fit
    trainer.fit(
        model=model,
        datamodule=datamodule,
        # None means "start from scratch"; a path resumes that checkpoint.
        ckpt_path=config.get('ckpt_path', None)
    )
if __name__ == '__main__':
    # CLI entry point: parse the config path, load it, launch pretraining.
    arg_parser = argparse.ArgumentParser(description='Pretrains the Hi-C foundation model.')
    arg_parser.add_argument(
        '--config',
        type=Path,
        default='digitalcell/conf/pretrain.yaml',
        help='Path to the pretraining configuration file.'
    )
    cli_args = arg_parser.parse_args()
    main(load_config(cli_args.config))