Merge pull request #403 from modelscope/dev/check_lint

fix yapf error

Author: Xingjun.Wang
Date: 2023-07-22 00:01:01 +08:00
Committed by: GitHub

3 changed files with 6 additions and 7 deletions
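Note (not part of the commit): the three hunks below are pure yapf reformatting. As a minimal sketch, the kind of drift this PR fixes can be reproduced locally with yapf's FormatCode API; the file path and the 'pep8' style below are placeholders, not the project's actual configuration.

# Sketch only: detect yapf formatting drift in a single file.
# 'some_module.py' and style_config='pep8' are placeholders.
from yapf.yapflib.yapf_api import FormatCode

with open('some_module.py') as f:
    source = f.read()

formatted, changed = FormatCode(source, style_config='pep8')
if changed:
    print('yapf would reformat this file')  # i.e. the lint check fails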

@@ -445,7 +445,7 @@ def plot_image(tb_dir: str,
fname = os.listdir(tb_dir)[0]
tb_path = os.path.join(tb_dir, fname)
data = read_tensorboard_file(tb_path)
#
for k in data.keys():
_data = data[k]
steps = [d['step'] for d in _data]
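Note (not part of the commit): the context above iterates a {tag: [{'step': ...}, ...]} mapping returned by read_tensorboard_file. A minimal sketch of how such a helper could be built on tensorboard's EventAccumulator follows; the function name and the 'value' key are assumptions, not the repository's implementation.

# Sketch only: one possible shape for a read_tensorboard_file-style helper,
# built on tensorboard's EventAccumulator. Not the repository's implementation.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator


def read_tb_scalars(tb_path: str) -> dict:
    ea = EventAccumulator(tb_path)
    ea.Reload()  # parse the event file on disk
    return {
        tag: [{'step': e.step, 'value': e.value} for e in ea.Scalars(tag)]
        for tag in ea.Tags()['scalars']
    }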

@@ -46,9 +46,8 @@ def mesh_warp_frame(frame, x_motion, y_motion, cap_width, cap_height):
             homo,
             origin_kp.contiguous().view(2, -1).permute(1, 0)).permute(1, 0)
-        projection.append(
-            projected_kp.contiguous().view(*origin_kp.shape).permute(
-                1, 2, 0))  # 2, H, W --> H, W, 2
+        projection.append(projected_kp.contiguous().view(
+            *origin_kp.shape).permute(1, 2, 0))  # 2, H, W --> H, W, 2
     projection = torch.stack(projection, 0)
     projection[:, :, :, 0] = projection[:, :, :, 0] / cfg.MODEL.WIDTH * 2. - 1.
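Note (not part of the commit): the removed and added lines express the same operation; the permute only reorders dimensions, as the inline comment says. A minimal, self-contained illustration with made-up sizes:

# Sketch only: (2, H, W) -> (H, W, 2) via permute; the data is unchanged.
import torch

kp = torch.randn(2, 4, 5)          # (2, H, W) with H=4, W=5
kp_hw2 = kp.permute(1, 2, 0)       # (H, W, 2): channels moved last
assert kp_hw2.shape == (4, 5, 2)
assert torch.equal(kp_hw2[..., 0], kp[0])  # channel 0 is preserved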

@@ -430,9 +430,9 @@ class MOELayer(Base):
         self.use_expert_residual_network = use_expert_residual_network
         if self.use_expert_residual_network:
-            self.expert_network = nn.Sequential(
-                *([ExpertResidualLayer(self.gate.model_dim)
-                   for _ in range(6)]))  # noqa
+            self.expert_network = nn.Sequential(*([
+                ExpertResidualLayer(self.gate.model_dim) for _ in range(6)
+            ]))  # noqa
         self.use_tutel = use_tutel and TUTEL_INSTALLED
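Note (not part of the commit): the reformatted block above builds a stack of six identical residual layers with nn.Sequential. A minimal sketch of that pattern follows; ToyResidualLayer is a stand-in, not the repository's ExpertResidualLayer.

# Sketch only: stacking repeated residual layers with nn.Sequential.
# ToyResidualLayer is a stand-in for ExpertResidualLayer.
import torch
from torch import nn


class ToyResidualLayer(nn.Module):

    def __init__(self, model_dim: int):
        super().__init__()
        self.ffn = nn.Sequential(
            nn.Linear(model_dim, model_dim), nn.ReLU(),
            nn.Linear(model_dim, model_dim))

    def forward(self, x):
        return x + self.ffn(x)  # residual connection


model_dim = 16
expert_network = nn.Sequential(*[ToyResidualLayer(model_dim) for _ in range(6)])
out = expert_network(torch.randn(2, model_dim))  # shape (2, 16)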