power_cogs.config.torch package

Submodules

power_cogs.config.torch.torch_config module
class power_cogs.config.torch.torch_config.AdamConf(_target_: str = 'torch.optim.adam.Adam', params: Any = '???', lr: Any = 0.001, betas: Any = (0.9, 0.999), eps: Any = 1e-08, weight_decay: Any = 0, amsgrad: Any = False)
    Bases: object

    amsgrad = False
    betas = (0.9, 0.999)
    eps = 1e-08
    lr = 0.001
    params = '???'
    weight_decay = 0
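These Conf classes appear to be Hydra structured configs (dataclasses): each one's _target_ names the torch class it builds, and a default of '???' is OmegaConf's MISSING marker for a field with no usable default. A minimal sketch of resolving AdamConf with hydra.utils.instantiate, assuming Hydra >= 1.1 semantics where MISSING fields can be filled in at call time (the Linear model is a stand-in):

    import torch.nn as nn
    from hydra.utils import instantiate

    from power_cogs.config.torch.torch_config import AdamConf

    model = nn.Linear(4, 2)  # stand-in model

    # params defaults to '???' (MISSING), so supply it when instantiating.
    optimizer = instantiate(AdamConf(lr=0.01), params=model.parameters())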
class power_cogs.config.torch.torch_config.ChainDatasetConf(_target_: str = 'torch.utils.data.dataset.ChainDataset', datasets: Any = '???')
    Bases: object

    datasets = '???'
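torch's ChainDataset chains iterable-style datasets, so the required datasets field is filled with IterableDataset instances at call time. A sketch, where RangeStream is a hypothetical toy stream defined only for illustration:

    from hydra.utils import instantiate
    from torch.utils.data import IterableDataset

    from power_cogs.config.torch.torch_config import ChainDatasetConf

    class RangeStream(IterableDataset):
        """Hypothetical toy iterable-style dataset."""
        def __init__(self, n):
            self.n = n

        def __iter__(self):
            return iter(range(self.n))

    # datasets is the single '???' field; the streams are consumed in order.
    chained = instantiate(ChainDatasetConf(), datasets=[RangeStream(3), RangeStream(2)])
    assert list(chained) == [0, 1, 2, 0, 1]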
class power_cogs.config.torch.torch_config.ConcatDatasetConf(_target_: str = 'torch.utils.data.dataset.ConcatDataset', datasets: Any = '???')
    Bases: object

    datasets = '???'
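Unlike ChainDataset, torch's ConcatDataset joins map-style datasets and supports len() and indexing across the parts. A sketch with toy TensorDatasets:

    import torch
    from hydra.utils import instantiate
    from torch.utils.data import TensorDataset

    from power_cogs.config.torch.torch_config import ConcatDatasetConf

    a = TensorDataset(torch.zeros(10, 3))
    b = TensorDataset(torch.ones(5, 3))

    # datasets is the single required ('???') field.
    combined = instantiate(ConcatDatasetConf(), datasets=[a, b])
    assert len(combined) == 15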
class power_cogs.config.torch.torch_config.CosineAnnealingLRConf(_target_: str = 'torch.optim.lr_scheduler.CosineAnnealingLR', optimizer: Any = '???', T_max: Any = '???', eta_min: Any = 0, last_epoch: Any = -1)
    Bases: object

    T_max = '???'
    eta_min = 0
    last_epoch = -1
    optimizer = '???'
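Like the optimizer configs, the scheduler configs leave optimizer as MISSING so the live optimizer can be bound in code. A sketch chaining AdamConf and CosineAnnealingLRConf, under the same Hydra assumptions as above:

    import torch.nn as nn
    from hydra.utils import instantiate

    from power_cogs.config.torch.torch_config import AdamConf, CosineAnnealingLRConf

    model = nn.Linear(4, 2)
    optimizer = instantiate(AdamConf(), params=model.parameters())

    # optimizer and T_max are both '???'; T_max is the number of steps
    # over which the LR anneals from its initial value down to eta_min.
    scheduler = instantiate(CosineAnnealingLRConf(T_max=100), optimizer=optimizer)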
class power_cogs.config.torch.torch_config.CosineAnnealingWarmRestartsConf(_target_: str = 'torch.optim.lr_scheduler.CosineAnnealingWarmRestarts', optimizer: Any = '???', T_0: Any = '???', T_mult: Any = 1, eta_min: Any = 0, last_epoch: Any = -1)
    Bases: object

    T_0 = '???'
    T_mult = 1
    eta_min = 0
    last_epoch = -1
    optimizer = '???'
class power_cogs.config.torch.torch_config.CyclicLRConf(_target_: str = 'torch.optim.lr_scheduler.CyclicLR', optimizer: Any = '???', base_lr: Any = '???', max_lr: Any = '???', step_size_up: Any = 2000, step_size_down: Any = None, mode: Any = 'triangular', gamma: Any = 1.0, scale_fn: Any = None, scale_mode: Any = 'cycle', cycle_momentum: Any = True, base_momentum: Any = 0.8, max_momentum: Any = 0.9, last_epoch: Any = -1)
    Bases: object

    base_lr = '???'
    base_momentum = 0.8
    cycle_momentum = True
    gamma = 1.0
    last_epoch = -1
    max_lr = '???'
    max_momentum = 0.9
    mode = 'triangular'
    optimizer = '???'
    scale_fn = None
    scale_mode = 'cycle'
    step_size_down = None
    step_size_up = 2000
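CyclicLR adds two required LR bounds on top of the required optimizer, and its default cycle_momentum=True expects an optimizer with a momentum parameter, which Adam does not have. A sketch pairing it with plain torch.optim.SGD instead:

    import torch
    import torch.nn as nn
    from hydra.utils import instantiate

    from power_cogs.config.torch.torch_config import CyclicLRConf

    model = nn.Linear(4, 2)
    # cycle_momentum=True (the default) requires a momentum-based optimizer
    # such as SGD; with Adam you would set cycle_momentum=False.
    optimizer = torch.optim.SGD(model.parameters(), lr=1e-4, momentum=0.9)

    # base_lr, max_lr and optimizer are all '???' and must be supplied.
    scheduler = instantiate(
        CyclicLRConf(base_lr=1e-4, max_lr=1e-2),
        optimizer=optimizer,
    )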
class power_cogs.config.torch.torch_config.DataLoaderConf(_target_: str = 'torch.utils.data.dataloader.DataLoader', dataset: Any = '???', batch_size: Any = 1, shuffle: Any = False, sampler: Any = None, batch_sampler: Any = None, num_workers: Any = 0, collate_fn: Any = None, pin_memory: Any = False, drop_last: Any = False, timeout: Any = 0, worker_init_fn: Any = None, multiprocessing_context: Any = None, generator: Any = None)
    Bases: object

    batch_sampler = None
    batch_size = 1
    collate_fn = None
    dataset = '???'
    drop_last = False
    generator = None
    multiprocessing_context = None
    num_workers = 0
    pin_memory = False
    sampler = None
    shuffle = False
    timeout = 0
    worker_init_fn = None
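Only dataset lacks a default; the remaining fields mirror torch.utils.data.DataLoader's own defaults. A sketch with a toy TensorDataset:

    import torch
    from hydra.utils import instantiate
    from torch.utils.data import TensorDataset

    from power_cogs.config.torch.torch_config import DataLoaderConf

    dataset = TensorDataset(torch.randn(100, 3), torch.randint(0, 2, (100,)))

    # dataset is the only required ('???') field.
    loader = instantiate(DataLoaderConf(batch_size=16, shuffle=True), dataset=dataset)
    for xb, yb in loader:
        pass  # a training step would go here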
class power_cogs.config.torch.torch_config.DatasetConf(_target_: str = 'torch.utils.data.dataset.Dataset')
    Bases: object
class power_cogs.config.torch.torch_config.ExponentialLRConf(_target_: str = 'torch.optim.lr_scheduler.ExponentialLR', optimizer: Any = '???', gamma: Any = 0.9999, last_epoch: Any = -1)
    Bases: object

    gamma = 0.9999
    last_epoch = -1
    optimizer = '???'
class power_cogs.config.torch.torch_config.IterableDatasetConf(_target_: str = 'torch.utils.data.dataset.IterableDataset')
    Bases: object
class power_cogs.config.torch.torch_config.LambdaLRConf(_target_: str = 'torch.optim.lr_scheduler.LambdaLR', optimizer: Any = '???', lr_lambda: Any = '???', last_epoch: Any = -1)
    Bases: object

    last_epoch = -1
    lr_lambda = '???'
    optimizer = '???'
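lr_lambda is a callable and cannot be written into a YAML config, so it is natural to pass it in Python at instantiation time, the same way optimizer is bound. A sketch under the same Hydra assumptions as above:

    import torch.nn as nn
    from hydra.utils import instantiate

    from power_cogs.config.torch.torch_config import AdamConf, LambdaLRConf

    model = nn.Linear(4, 2)
    optimizer = instantiate(AdamConf(), params=model.parameters())

    # lr_lambda maps the epoch index to a multiplicative factor on the base LR.
    scheduler = instantiate(
        LambdaLRConf(),
        optimizer=optimizer,
        lr_lambda=lambda epoch: 0.95 ** epoch,
    )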
class power_cogs.config.torch.torch_config.MultiStepLRConf(_target_: str = 'torch.optim.lr_scheduler.MultiStepLR', optimizer: Any = '???', milestones: Any = '???', gamma: Any = 0.1, last_epoch: Any = -1)
    Bases: object

    gamma = 0.1
    last_epoch = -1
    milestones = '???'
    optimizer = '???'
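Besides optimizer, milestones is the required field: the LR is multiplied by gamma at each listed epoch. A sketch:

    import torch.nn as nn
    from hydra.utils import instantiate

    from power_cogs.config.torch.torch_config import AdamConf, MultiStepLRConf

    model = nn.Linear(4, 2)
    optimizer = instantiate(AdamConf(), params=model.parameters())

    # The LR is multiplied by gamma=0.1 at epochs 30 and 80.
    scheduler = instantiate(MultiStepLRConf(milestones=[30, 80]), optimizer=optimizer)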
class power_cogs.config.torch.torch_config.MultiplicativeLRConf(_target_: str = 'torch.optim.lr_scheduler.MultiplicativeLR', optimizer: Any = '???', lr_lambda: Any = '???', last_epoch: Any = -1)
    Bases: object

    last_epoch = -1
    lr_lambda = '???'
    optimizer = '???'
class power_cogs.config.torch.torch_config.OneCycleLRConf(_target_: str = 'torch.optim.lr_scheduler.OneCycleLR', optimizer: Any = '???', max_lr: Any = '???', total_steps: Any = None, epochs: Any = None, steps_per_epoch: Any = None, pct_start: Any = 0.3, anneal_strategy: Any = 'cos', cycle_momentum: Any = True, base_momentum: Any = 0.85, max_momentum: Any = 0.95, div_factor: Any = 25.0, final_div_factor: Any = 10000.0, last_epoch: Any = -1)
    Bases: object

    anneal_strategy = 'cos'
    base_momentum = 0.85
    cycle_momentum = True
    div_factor = 25.0
    epochs = None
    final_div_factor = 10000.0
    last_epoch = -1
    max_lr = '???'
    max_momentum = 0.95
    optimizer = '???'
    pct_start = 0.3
    steps_per_epoch = None
    total_steps = None
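Besides max_lr, torch's OneCycleLR needs a schedule length: either total_steps, or both epochs and steps_per_epoch (all three default to None here). A sketch:

    import torch.nn as nn
    from hydra.utils import instantiate

    from power_cogs.config.torch.torch_config import AdamConf, OneCycleLRConf

    model = nn.Linear(4, 2)
    optimizer = instantiate(AdamConf(), params=model.parameters())

    # Either total_steps, or epochs plus steps_per_epoch, must be given.
    scheduler = instantiate(
        OneCycleLRConf(max_lr=0.01, total_steps=1000),
        optimizer=optimizer,
    )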
class power_cogs.config.torch.torch_config.ReduceLROnPlateauConf(_target_: str = 'torch.optim.lr_scheduler.ReduceLROnPlateau', optimizer: Any = '???', mode: Any = 'min', factor: Any = 0.1, patience: Any = 10, verbose: Any = False, threshold: Any = 0.0001, threshold_mode: Any = 'rel', cooldown: Any = 0, min_lr: Any = 0, eps: Any = 1e-08)
    Bases: object

    cooldown = 0
    eps = 1e-08
    factor = 0.1
    min_lr = 0
    mode = 'min'
    optimizer = '???'
    patience = 10
    threshold = 0.0001
    threshold_mode = 'rel'
    verbose = False
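Unlike the epoch-indexed schedulers, ReduceLROnPlateau monitors a metric: its step() takes the value to track, and the LR is multiplied by factor after patience epochs without improvement. A sketch:

    import torch.nn as nn
    from hydra.utils import instantiate

    from power_cogs.config.torch.torch_config import AdamConf, ReduceLROnPlateauConf

    model = nn.Linear(4, 2)
    optimizer = instantiate(AdamConf(), params=model.parameters())

    scheduler = instantiate(ReduceLROnPlateauConf(patience=5), optimizer=optimizer)
    scheduler.step(0.42)  # pass the monitored metric, e.g. validation loss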
class power_cogs.config.torch.torch_config.StepLRConf(_target_: str = 'torch.optim.lr_scheduler.StepLR', optimizer: Any = '???', step_size: Any = 0.1, gamma: Any = 0.1, last_epoch: Any = -1)
    Bases: object

    gamma = 0.1
    last_epoch = -1
    optimizer = '???'
    step_size = 0.1
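One quirk worth flagging: the schema's step_size default is the float 0.1, while torch documents StepLR's step_size as an integer epoch period, so in practice it is supplied explicitly. A sketch:

    import torch.nn as nn
    from hydra.utils import instantiate

    from power_cogs.config.torch.torch_config import AdamConf, StepLRConf

    model = nn.Linear(4, 2)
    optimizer = instantiate(AdamConf(), params=model.parameters())

    # Decay the LR by gamma=0.1 every 30 epochs.
    scheduler = instantiate(StepLRConf(step_size=30), optimizer=optimizer)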