Fix super parameters

hzwer
2021-03-07 01:00:24 +08:00
parent bea12fe290
commit 781ea8c9c1
5 changed files with 5 additions and 5 deletions

View File

@@ -110,7 +110,7 @@ class Model:
         self.optimG = AdamW(itertools.chain(
             self.flownet.parameters(),
             self.contextnet.parameters(),
-            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-5)
+            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)
         self.schedulerG = optim.lr_scheduler.CyclicLR(
             self.optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)
         self.epe = EPE()
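For context, a minimal runnable sketch of the optimizer/scheduler setup after this change. The toy nn.Linear modules stand in for the real flownet, contextnet, and fusionnet, which are not shown in this hunk; everything else mirrors the diff.

    import itertools
    import torch
    from torch import nn, optim
    from torch.optim import AdamW

    # Stand-ins for the real sub-networks (assumption: only their .parameters() matter here).
    flownet = nn.Linear(8, 8)
    contextnet = nn.Linear(8, 8)
    fusionnet = nn.Linear(8, 8)

    # After this commit: weight_decay raised from 1e-5 to 1e-4.
    optimG = AdamW(itertools.chain(
        flownet.parameters(),
        contextnet.parameters(),
        fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)

    # Unchanged in this commit: cyclic LR between 1e-6 and 1e-3 with an 8000-step ramp.
    # cycle_momentum=False is required because Adam-family optimizers have no
    # 'momentum' key in their parameter groups.
    schedulerG = optim.lr_scheduler.CyclicLR(
        optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)

The same one-line weight-decay change is applied to each of the model files below.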

View File

@@ -110,7 +110,7 @@ class Model:
         self.optimG = AdamW(itertools.chain(
             self.flownet.parameters(),
             self.contextnet.parameters(),
-            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-5)
+            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)
         self.schedulerG = optim.lr_scheduler.CyclicLR(
             self.optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)
         self.epe = EPE()

View File

@@ -110,7 +110,7 @@ class Model:
         self.optimG = AdamW(itertools.chain(
             self.flownet.parameters(),
             self.contextnet.parameters(),
-            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-5)
+            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)
         self.schedulerG = optim.lr_scheduler.CyclicLR(
             self.optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)
         self.epe = EPE()

View File

@@ -110,7 +110,7 @@ class Model:
         self.optimG = AdamW(itertools.chain(
             self.flownet.parameters(),
             self.contextnet.parameters(),
-            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-5)
+            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)
         self.schedulerG = optim.lr_scheduler.CyclicLR(
             self.optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)
         self.epe = EPE()

View File

@@ -19,7 +19,7 @@ def get_learning_rate(step):
         mul = step / 2000.
     else:
         mul = np.cos((step - 2000) / (args.epoch * args.step_per_epoch - 2000.) * math.pi) * 0.5 + 0.5
-    return 5e-4 * mul
+    return 3e-4 * mul
 
 def flow2rgb(flow_map_np):
     h, w, _ = flow_map_np.shape
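For reference, a self-contained sketch of the warmup-plus-cosine schedule this hunk tunes: linear warmup to the new 3e-4 peak over the first 2000 steps, then cosine decay to zero over the remaining steps. The EPOCHS and STEPS_PER_EPOCH constants are placeholders for args.epoch and args.step_per_epoch, whose real values are not shown in this diff.

    import math
    import numpy as np

    # Placeholders for args.epoch * args.step_per_epoch (assumed values, for illustration only).
    EPOCHS = 300
    STEPS_PER_EPOCH = 1000
    TOTAL_STEPS = EPOCHS * STEPS_PER_EPOCH

    def get_learning_rate(step):
        # Linear warmup over the first 2000 steps...
        if step < 2000:
            mul = step / 2000.
        else:
            # ...then a half-cosine decay from 1.0 down to 0.0 over the rest of training.
            mul = np.cos((step - 2000) / (TOTAL_STEPS - 2000.) * math.pi) * 0.5 + 0.5
        # Peak LR lowered from 5e-4 to 3e-4 in this commit.
        return 3e-4 * mul

    # Sanity checks: get_learning_rate(0) == 0.0, get_learning_rate(2000) == 3e-4,
    # and get_learning_rate(TOTAL_STEPS) == 0.0 (up to float rounding).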