from collections import namedtuple

import torch

# Output locations for training artifacts.
LOG_DIR = "./logs/"
CACHE_DIR = "./cache/"

# NOTE(review): BATCH_SIZE was referenced below but never defined in this file
# (NameError at import time). 32 is a placeholder default -- confirm against
# the training setup this config is used with.
BATCH_SIZE = 32

# Run on GPU when one is available, otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Immutable bag of fine-tuning hyperparameters.
# Fields: classifier head size/init (num_classes, dropout, init_range),
# optimization (batch_size, lr, max_norm, n_warmup, gradient_acc_steps),
# data split (valid_pct), and run plumbing (device, log_dir, dataset_cache).
FineTuningConfig = namedtuple(
    "FineTuningConfig",
    field_names=(
        "num_classes, dropout, init_range, batch_size, lr, max_norm, "
        "n_warmup, valid_pct, gradient_acc_steps, device, log_dir, dataset_cache"
    ),
)

# Keyword construction so every hyperparameter is self-describing
# (the original positional call made the value-to-field mapping opaque).
finetuning_config = FineTuningConfig(
    num_classes=2,
    dropout=0.1,
    init_range=0.02,
    batch_size=BATCH_SIZE,
    lr=6.5e-5,
    max_norm=1.0,
    n_warmup=10,
    valid_pct=0.1,
    gradient_acc_steps=1,
    device=device,
    log_dir=LOG_DIR,
    dataset_cache=CACHE_DIR + "dataset_cache.bin",
)