Adjust the optimizer learning rate, increase the number of training epochs, modify the class weights, update the learning-rate scheduler parameters, and enable verbose scheduler output

陈培栋 2024-10-16 11:07:59 +08:00
parent 257818f86d
commit 20537fe10f
5 changed files with 1688 additions and 11 deletions

View File

@@ -3,9 +3,9 @@ scheduler_type: LINEAR_WARMUP_THEN_POLY_SCHEDULER
# total_iters = epochs * number of training images
kwargs: |
{
"warmup_iters": 2904,
"total_iters": 290400,
"warmup_iters": 5808,
"total_iters": 580800,
"warmup_ratio": 0.000001,
"min_lr": 0.,
"min_lr": 0.00000,
"power": 1.
}
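For reference, the new values are consistent with the comment above: with epochs raised to 200 (see the training config further down), total_iters = 580800 implies 2904 training images, and warmup_iters = 5808 corresponds to two warmup epochs. A minimal sanity check, assuming one iteration per training image (an assumption, not something stated in the config):

num_train_images = 2904                    # implied by 580800 / 200; assumption
epochs = 200

total_iters = epochs * num_train_images    # 200 * 2904 = 580800
warmup_iters = 2 * num_train_images        # 2 * 2904 = 5808 (two warmup epochs)
print(total_iters, warmup_iters)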

View File

@@ -3,7 +3,7 @@ base_config:
optim_type: AdamW
kwargs: |
{
"lr": 0.00006,
"lr": 0.006,
"weight_decay": 0.01,
"betas": (0.9, 0.999)
}
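This change raises the learning rate by a factor of 100 (0.00006 to 0.006). Note that the kwargs block is a YAML literal string whose betas entry uses Python tuple syntax, which a strict JSON parser would reject. A minimal sketch of how such a string could be turned into an AdamW optimizer, assuming it is parsed with ast.literal_eval (the repository's actual loader is not part of this diff):

import ast
import torch
import torch.optim as optim

kwargs_text = '{"lr": 0.006, "weight_decay": 0.01, "betas": (0.9, 0.999)}'
kwargs = ast.literal_eval(kwargs_text)    # accepts the tuple syntax that json.loads rejects

model = torch.nn.Linear(4, 2)             # placeholder model for illustration only
optimizer = optim.AdamW(model.parameters(), **kwargs)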

View File

@@ -3,14 +3,14 @@ batch_size: 16
image_height: 200
image_width: 200
workers: 0
-epochs: 100
+epochs: 200
# Per-class weights; to weight every class equally, set each entry to 1.0
weight:
- 1.0
- 4.0
- 3.0
- 1.0
- 2.2
- 2.0
# Dataset root directory
root:
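The weight list above holds one entry per class; classes with values above 1.0 are up-weighted relative to the others. A minimal sketch of how such weights are commonly applied, assuming a torch.nn.CrossEntropyLoss criterion (the loss construction itself is not shown in this diff):

import torch
import torch.nn as nn

# One weight per class, copied from the config above; setting all entries
# to 1.0 would weight every class equally.
class_weights = torch.tensor([1.0, 4.0, 3.0, 1.0, 2.2, 2.0])
criterion = nn.CrossEntropyLoss(weight=class_weights)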

View File

@@ -257,27 +257,27 @@ def get_lr_scheduler(optimizer: optim, scheduler_type: SchedulerType, kwargs=Non
optimizer=optimizer,
step_size=30,
gamma=0.1,
-verbose=False
+verbose=True
)
elif scheduler_type == SchedulerType.MULTI_STEP_SCHEDULER:
return MultiStepScheduler(
optimizer=optimizer,
milestones=[30, 60, 90],
gamma=0.1,
-verbose=False
+verbose=True
)
elif scheduler_type == SchedulerType.EXPONENTIAL_SCHEDULER:
return ExponentialScheduler(
optimizer=optimizer,
gamma=0.95,
-verbose=False
+verbose=True
)
elif scheduler_type == SchedulerType.COSINE_ANNEALING_SCHEDULER:
return CosineAnnealingScheduler(
optimizer=optimizer,
t_max=5,
min_lr=0,
-verbose=False
+verbose=True
)
elif scheduler_type == SchedulerType.LINEAR_WARMUP_THEN_POLY_SCHEDULER:
return LinearWarmupThenPolyScheduler(
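This hunk switches every scheduler branch to verbose=True; if the wrappers forward the flag to the underlying PyTorch schedulers, the current learning rate is logged on each step() call. A hypothetical usage sketch, assuming get_lr_scheduler and SchedulerType are importable from the module shown above (its path is not visible in this diff) and that the returned object follows the standard scheduler API:

import torch

model = torch.nn.Linear(4, 2)                                # placeholder model
optimizer = torch.optim.AdamW(model.parameters(), lr=0.006)

# get_lr_scheduler / SchedulerType are assumed to be importable from the file above.
scheduler = get_lr_scheduler(optimizer, SchedulerType.STEP_SCHEDULER)

for epoch in range(200):
    # ... one epoch of training would go here ...
    scheduler.step()    # with verbose=True, PyTorch schedulers log the learning rate here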

log.txt (new file, 1677 additions)

File diff suppressed because it is too large