From 781ea8c9c11c2a0954bb2b80ddfdd1a018b33fe0 Mon Sep 17 00:00:00 2001
From: hzwer <598460606@163.com>
Date: Sun, 7 Mar 2021 01:00:24 +0800
Subject: [PATCH] Fix super parameters

---
 model/RIFE.py      | 2 +-
 model/RIFE15C.py   | 2 +-
 model/RIFE2F.py    | 2 +-
 model/RIFE2F15C.py | 2 +-
 train.py           | 2 +-
 5 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/model/RIFE.py b/model/RIFE.py
index 7dfa60f..0a9094f 100644
--- a/model/RIFE.py
+++ b/model/RIFE.py
@@ -110,7 +110,7 @@ class Model:
         self.optimG = AdamW(itertools.chain(
             self.flownet.parameters(),
             self.contextnet.parameters(),
-            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-5)
+            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)
         self.schedulerG = optim.lr_scheduler.CyclicLR(
             self.optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)
         self.epe = EPE()
diff --git a/model/RIFE15C.py b/model/RIFE15C.py
index 02ac33e..aaee0b4 100644
--- a/model/RIFE15C.py
+++ b/model/RIFE15C.py
@@ -110,7 +110,7 @@ class Model:
         self.optimG = AdamW(itertools.chain(
             self.flownet.parameters(),
             self.contextnet.parameters(),
-            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-5)
+            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)
         self.schedulerG = optim.lr_scheduler.CyclicLR(
             self.optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)
         self.epe = EPE()
diff --git a/model/RIFE2F.py b/model/RIFE2F.py
index 4a4b382..7e368bb 100644
--- a/model/RIFE2F.py
+++ b/model/RIFE2F.py
@@ -110,7 +110,7 @@ class Model:
         self.optimG = AdamW(itertools.chain(
             self.flownet.parameters(),
             self.contextnet.parameters(),
-            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-5)
+            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)
         self.schedulerG = optim.lr_scheduler.CyclicLR(
             self.optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)
         self.epe = EPE()
diff --git a/model/RIFE2F15C.py b/model/RIFE2F15C.py
index d59c557..d75a448 100644
--- a/model/RIFE2F15C.py
+++ b/model/RIFE2F15C.py
@@ -110,7 +110,7 @@ class Model:
         self.optimG = AdamW(itertools.chain(
             self.flownet.parameters(),
             self.contextnet.parameters(),
-            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-5)
+            self.fusionnet.parameters()), lr=1e-6, weight_decay=1e-4)
         self.schedulerG = optim.lr_scheduler.CyclicLR(
             self.optimG, base_lr=1e-6, max_lr=1e-3, step_size_up=8000, cycle_momentum=False)
         self.epe = EPE()
diff --git a/train.py b/train.py
index 00080b6..002dddd 100644
--- a/train.py
+++ b/train.py
@@ -19,7 +19,7 @@ def get_learning_rate(step):
         mul = step / 2000.
     else:
         mul = np.cos((step - 2000) / (args.epoch * args.step_per_epoch - 2000.) * math.pi) * 0.5 + 0.5
-    return 5e-4 * mul
+    return 3e-4 * mul
 
 def flow2rgb(flow_map_np):
     h, w, _ = flow_map_np.shape
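
Note: this patch adjusts two hyperparameters. The four model files raise AdamW's weight_decay from 1e-5 to 1e-4, and train.py lowers the peak of its warmup-plus-cosine learning-rate schedule from 5e-4 to 3e-4. Below is a minimal standalone sketch of the updated schedule; EPOCHS and STEPS_PER_EPOCH are illustrative placeholders for args.epoch and args.step_per_epoch, not the repository's defaults.

import math
import numpy as np

# Placeholder training length; args.epoch * args.step_per_epoch in the repo.
EPOCHS = 300
STEPS_PER_EPOCH = 1000
TOTAL_STEPS = EPOCHS * STEPS_PER_EPOCH

def get_learning_rate(step):
    if step < 2000:
        mul = step / 2000.  # linear warmup over the first 2000 steps
    else:
        # cosine decay from 1 at step 2000 down to 0 at the final step
        mul = np.cos((step - 2000) / (TOTAL_STEPS - 2000.) * math.pi) * 0.5 + 0.5
    return 3e-4 * mul  # peak learning rate lowered from 5e-4 by this patch

if __name__ == "__main__":
    # Sample the schedule: 0 at step 0, peaks at 3e-4 at step 2000, decays to 0.
    for s in (0, 1000, 2000, TOTAL_STEPS // 2, TOTAL_STEPS):
        print(f"step {s:>6}: lr = {get_learning_rate(s):.2e}")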