from typing import Union, Optional

import numpy as np
import torch
import torch.optim as optim
from torch.utils.data import DataLoader
from torch.utils.data.dataloader import default_collate

from models import RGBPartNet
from utils.configuration import DataloaderConfiguration, \
    HyperparameterConfiguration, DatasetConfiguration, ModelConfiguration
from utils.dataset import CASIAB
from utils.sampler import TripletSampler


class Model:
    """Training/evaluation wrapper around :class:`RGBPartNet`.

    Holds the model, optimizer and LR scheduler, parses dataset and
    dataloader configurations, and drives the triplet-sampled training
    loop. The ``signature`` property produces a unique string that
    identifies the current model/hyperparameter/dataset combination
    (useful for checkpoint naming).
    """

    def __init__(
            self,
            model_config: ModelConfiguration,
            hyperparameter_config: HyperparameterConfiguration
    ):
        self.meta = model_config
        self.hp = hyperparameter_config
        # Iteration to resume from (0 for a fresh run).
        # NOTE(review): fit() below enumerates from 0 and never advances
        # curr_iter — confirm whether checkpoint resume is actually wired up.
        self.curr_iter = self.meta['restore_iter']

        self.is_train: bool = True
        self.train_size: int = 74
        self.in_channels: int = 3
        # Triplet-sampling parameters: pr subjects per batch, k clips each.
        self.pr: Optional[int] = None
        self.k: Optional[int] = None

        # Signatures exclude volatile keys (restore_iter) so equal configs
        # always map to the same string.
        self._model_sig: str = self._make_signature(self.meta,
                                                    ['restore_iter'])
        self._hp_sig: str = self._make_signature(self.hp)
        self._dataset_sig: str = 'undefined'

        # NOTE: the attribute name keeps the original (misspelled) 'rbg_pn'
        # for backward compatibility with external callers.
        self.rbg_pn: Optional[RGBPartNet] = None
        self.optimizer: Optional[optim.Adam] = None
        self.scheduler: Optional[optim.lr_scheduler.StepLR] = None

    @property
    def signature(self) -> str:
        """Unique identifier of the current model + hp + dataset setup."""
        return '_'.join((self._model_sig, str(self.curr_iter), self._hp_sig,
                         self._dataset_sig, str(self.batch_size)))

    @property
    def batch_size(self) -> int:
        """Effective batch size: ``pr * k`` while training, 1 at test time.

        Raises:
            AttributeError: in training mode before a dataloader has been
                parsed (``pr``/``k`` still unset).
        """
        if self.is_train:
            if self.pr and self.k:
                return self.pr * self.k
            raise AttributeError('No dataset loaded')
        else:
            return 1

    def fit(
            self,
            dataset_config: DatasetConfiguration,
            dataloader_config: DataloaderConfiguration,
    ):
        """Train RGBPartNet until ``meta['total_iter']`` iterations.

        Args:
            dataset_config: dataset selection/options (consumed by
                ``_parse_dataset_config``).
            dataloader_config: dataloader options, including the
                ``(pr, k)`` triplet batch specification.
        """
        self.is_train = True
        dataset = self._parse_dataset_config(dataset_config)
        dataloader = self._parse_dataloader_config(dataset, dataloader_config)

        # Prepare model, optimizer and scheduler. 'lr' and 'betas' are
        # optimizer-only keys; everything else in hp is forwarded to the
        # network constructor.
        hp = self.hp.copy()
        lr, betas = hp.pop('lr', 1e-4), hp.pop('betas', (0.9, 0.999))
        self.rbg_pn = RGBPartNet(self.train_size, self.in_channels, **hp)
        self.optimizer = optim.Adam(self.rbg_pn.parameters(), lr, betas)
        self.scheduler = optim.lr_scheduler.StepLR(self.optimizer, 500, 0.9)

        self.rbg_pn.train()
        for iter_i, (x_c1, x_c2) in enumerate(dataloader):
            # FIX: gradients must be cleared every step; without this they
            # accumulate across iterations and corrupt each update.
            self.optimizer.zero_grad()
            loss = self.rbg_pn(x_c1['clip'], x_c2['clip'], x_c1['label'])
            loss.backward()
            self.optimizer.step()
            # FIX: passing an epoch index to step() is deprecated — the
            # scheduler maintains its own step counter.
            self.scheduler.step()
            if iter_i == self.meta['total_iter']:
                break

    def _parse_dataset_config(
            self,
            dataset_config: DatasetConfiguration
    ) -> CASIAB:
        """Build the dataset named in *dataset_config*.

        Also records ``train_size``/``in_channels`` and the dataset
        signature (excluding machine-local keys ``root_dir``/``cache_on``).

        Raises:
            ValueError: for an unrecognized dataset name.
        """
        self.train_size = dataset_config['train_size']
        self.in_channels = dataset_config['num_input_channels']
        self._dataset_sig = self._make_signature(
            dataset_config,
            popped_keys=['root_dir', 'cache_on']
        )
        config: dict = dataset_config.copy()
        name = config.pop('name')
        if name == 'CASIA-B':
            return CASIAB(**config, is_train=self.is_train)
        elif name == 'FVG':
            # TODO
            pass
        raise ValueError('Invalid dataset: {0}'.format(name))

    def _parse_dataloader_config(
            self,
            dataset: Union[CASIAB],
            dataloader_config: DataloaderConfiguration
    ) -> DataLoader:
        """Build a DataLoader: triplet-sampled for training, plain for test.

        In training mode, ``batch_size`` in the config is a ``(pr, k)``
        pair driving the TripletSampler; at test time it is discarded
        (test batch size is fixed at 1).
        """
        config: dict = dataloader_config.copy()
        if self.is_train:
            (self.pr, self.k) = config.pop('batch_size')
            triplet_sampler = TripletSampler(dataset, (self.pr, self.k))
            return DataLoader(dataset,
                              batch_sampler=triplet_sampler,
                              collate_fn=self._batch_splitter,
                              **config)
        else:  # is_test
            config.pop('batch_size')
            return DataLoader(dataset, **config)

    def _batch_splitter(
            self,
            batch: list[dict[str, Union[np.int64, str, torch.Tensor]]]
    ) -> tuple[dict[str, Union[list[str], torch.Tensor]],
               dict[str, Union[list[str], torch.Tensor]]]:
        """Disentanglement needs two random conditions; this function
        splits ``pr * k * 2`` samples into 2 dicts each containing
        ``pr * k`` samples. Labels and clip data are tensors, the rest
        stay lists.

        The sampler emits, per subject, ``2k`` consecutive samples; the
        first ``k`` go to condition 1 and the second ``k`` to condition 2.
        """
        _batch = [[], []]
        for i in range(0, self.pr * self.k * 2, self.k * 2):
            _batch[0] += batch[i:i + self.k]
            _batch[1] += batch[i + self.k:i + self.k * 2]

        return default_collate(_batch[0]), default_collate(_batch[1])

    @staticmethod
    def _make_signature(config: dict,
                        popped_keys: Optional[list] = None) -> str:
        """Flatten *config* into an underscore-joined signature string.

        Keys listed in *popped_keys* are excluded; tuple/list values are
        themselves joined with underscores, everything else is str()-ed.
        """
        _config = config.copy()
        for (key, value) in config.items():
            if popped_keys and key in popped_keys:
                _config.pop(key)
                continue
            if isinstance(value, str):
                pass
            elif isinstance(value, (tuple, list)):
                _config[key] = '_'.join([str(v) for v in value])
            else:
                _config[key] = str(value)

        return '_'.join(_config.values())