GitHub Repository: jantic/deoldify
Path: blob/master/fastai/callbacks/one_cycle.py
"Supports 1-Cycle style training"
from ..core import *
from ..callback import *
from ..basic_train import Learner,LearnerCallback

__all__ = ['OneCycleScheduler']

class OneCycleScheduler(LearnerCallback):
    "Manage 1-Cycle style training as outlined in Leslie Smith's [paper](https://arxiv.org/pdf/1803.09820.pdf)."
    def __init__(self, learn:Learner, lr_max:float, moms:Floats=(0.95,0.85), div_factor:float=25., pct_start:float=0.3,
                 final_div:float=None, tot_epochs:int=None, start_epoch:int=None):
        super().__init__(learn)
        # lr_max is the peak of the cycle; the initial lr is lr_max/div_factor, the final lr
        # is lr_max/final_div, and pct_start is the fraction of iterations spent rising from
        # the initial lr to lr_max.
        self.lr_max,self.div_factor,self.pct_start,self.final_div = lr_max,div_factor,pct_start,final_div
        if self.final_div is None: self.final_div = div_factor*1e4
        self.moms=tuple(listify(moms,2))
        if is_listy(self.lr_max): self.lr_max = np.array(self.lr_max)
        self.start_epoch, self.tot_epochs = start_epoch, tot_epochs

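    # `steps` pairs each (start, end) value range passed in with the matching entry of
    # `self.phases` ((n_iter, anneal_func)) and returns one Scheduler per phase.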
    def steps(self, *steps_cfg:StartOptEnd):
        "Build anneal schedule for all of the parameters."
        return [Scheduler(step, n_iter, func=func)
                for (step,(n_iter,func)) in zip(steps_cfg, self.phases)]

    def on_train_begin(self, n_epochs:int, epoch:int, **kwargs:Any)->None:
        "Initialize our optimization params based on our annealing schedule."
        res = {'epoch':self.start_epoch} if self.start_epoch is not None else None
        self.start_epoch = ifnone(self.start_epoch, epoch)
        self.tot_epochs = ifnone(self.tot_epochs, n_epochs)
        n = len(self.learn.data.train_dl) * self.tot_epochs
        a1 = int(n * self.pct_start)
        a2 = n-a1
        self.phases = ((a1, annealing_cos), (a2, annealing_cos))
        low_lr = self.lr_max/self.div_factor
        self.lr_scheds = self.steps((low_lr, self.lr_max), (self.lr_max, self.lr_max/self.final_div))
        self.mom_scheds = self.steps(self.moms, (self.moms[1], self.moms[0]))
        self.opt = self.learn.opt
        self.opt.lr,self.opt.mom = self.lr_scheds[0].start,self.mom_scheds[0].start
        self.idx_s = 0
        return res

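    # Illustrative shape of the cycle built above (assuming lr_max=1e-3 and the defaults
    # div_factor=25., pct_start=0.3, final_div=None, i.e. 25e4):
    #   phase 1, first 30% of iterations: lr anneals 4e-5 -> 1e-3, mom 0.95 -> 0.85
    #   phase 2, remaining 70%:           lr anneals 1e-3 -> 4e-9, mom 0.85 -> 0.95
    # Both phases use cosine annealing (annealing_cos).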
    def jump_to_epoch(self, epoch:int)->None:
        # Fast-forward the schedules by stepping through the batches of the skipped
        # epochs, so training can resume mid-cycle from `epoch`.
        for _ in range(len(self.learn.data.train_dl) * epoch):
            self.on_batch_end(True)

    def on_batch_end(self, train, **kwargs:Any)->None:
        "Take one step forward on the annealing schedule for the optim params."
        if train:
            # once both schedules are exhausted the cycle is over, so ask to stop
            if self.idx_s >= len(self.lr_scheds): return {'stop_training': True, 'stop_epoch': True}
            self.opt.lr = self.lr_scheds[self.idx_s].step()
            self.opt.mom = self.mom_scheds[self.idx_s].step()
            # when the current schedule is complete we move on to the next
            # schedule (in 1-cycle there are two schedules)
            if self.lr_scheds[self.idx_s].is_done:
                self.idx_s += 1

    def on_epoch_end(self, epoch, **kwargs:Any)->None:
        "Tell Learner to stop if the cycle is finished."
        if epoch > self.tot_epochs: return {'stop_training': True}
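
# Minimal usage sketch (illustration only, not part of this module). In fastai v1 this
# callback is normally created for you by `Learner.fit_one_cycle`; attaching it by hand,
# assuming an existing `Learner` instance named `learn`, would look roughly like:
#
#   sched = OneCycleScheduler(learn, lr_max=1e-3, moms=(0.95, 0.85), pct_start=0.3)
#   learn.fit(5, lr=1e-3, callbacks=[sched])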