Fixed and improved XVFI code to work better with embedded python runtime

This commit is contained in:
n00mkrad 2021-08-24 22:02:41 +02:00
parent 75d4085487
commit fc72b3899f
2 changed files with 28 additions and 150 deletions

View File

@ -2,6 +2,15 @@ import argparse, os, shutil, time, random, torch, cv2, datetime, torch.utils.dat
import torch.backends.cudnn as cudnn
import torch.optim as optim
import numpy as np
import sys
import os
abspath = os.path.abspath(__file__)
wrkdir = os.path.dirname(abspath)
print("Changing working dir to {0}".format(wrkdir))
os.chdir(os.path.dirname(wrkdir))
print("Added {0} to temporary PATH".format(wrkdir))
sys.path.append(wrkdir)
from torch.autograd import Variable
from utils import *
@ -142,7 +151,7 @@ def main():
epoch = args.epochs - 1
elif args.phase == "test" or args.phase == "metrics_evaluation" or args.phase == 'test_custom':
checkpoint = SM.load_model(args.mdl_dir)
checkpoint = SM.load_model(os.path.join(wrkdir, args.mdl_dir))
model_net.load_state_dict(checkpoint['state_dict_Model'])
epoch = checkpoint['last_epoch']
@ -309,20 +318,26 @@ def main():
print("information of model:", args.model_dir)
print("best_PSNR of model:", best_PSNR)
def write_src_frame(src_path, target_path, args):
    """Place a source frame at target_path in the configured image format.

    If the source file already carries the desired extension it is copied
    byte-for-byte; otherwise it is decoded and re-encoded with OpenCV so the
    output matches ``args.img_format``.

    Args:
        src_path: path of the existing source frame.
        target_path: destination path (already carries the target extension).
        args: namespace providing ``img_format`` (extension without the dot).

    Raises:
        ValueError: if OpenCV cannot decode the source image.
    """
    _, file_ext = os.path.splitext(src_path)
    # Compare extensions case-insensitively so e.g. ".PNG" is copied
    # directly instead of being needlessly decoded and re-encoded.
    if file_ext.lower() == f".{args.img_format}".lower():
        shutil.copy(src_path, target_path)
    else:
        img = cv2.imread(src_path)
        if img is None:
            # cv2.imread returns None on unreadable/unsupported input;
            # fail loudly instead of handing None to cv2.imwrite.
            raise ValueError(f"Could not read source frame: {src_path}")
        cv2.imwrite(target_path, img)
def test(test_loader, model_net, criterion, epoch, args, device, multiple, postfix, validation):
#os.chdir(interp_output_path)
batch_time = AverageClass('Time:', ':6.3f')
losses = AverageClass('testLoss:', ':.4e')
PSNRs = AverageClass('testPSNR:', ':.4e')
SSIMs = AverageClass('testSSIM:', ':.4e')
#batch_time = AverageClass('Time:', ':6.3f')
#losses = AverageClass('testLoss:', ':.4e')
#PSNRs = AverageClass('testPSNR:', ':.4e')
#SSIMs = AverageClass('testSSIM:', ':.4e')
args.divide = 2 ** (args.S_tst) * args.module_scale_factor * 4
# progress = ProgressMeter(len(test_loader), batch_time, accm_time, losses, PSNRs, SSIMs, prefix='Test after Epoch[{}]: '.format(epoch))
progress = ProgressMeter(len(test_loader), PSNRs, SSIMs, prefix='Test after Epoch[{}]: '.format(epoch))
#progress = ProgressMeter(len(test_loader), PSNRs, SSIMs, prefix='Test after Epoch[{}]: '.format(epoch))
multi_scale_recon_loss = criterion[0]
#multi_scale_recon_loss = criterion[0]
# switch to evaluate mode
model_net.eval()
@ -383,7 +398,7 @@ def test(test_loader, model_net, criterion, epoch, args, device, multiple, postf
pass
else:
print(f"S => {os.path.basename(src_frame_path)} => {os.path.basename(frame_src_path)}")
shutil.copy(src_frame_path, frame_src_path)
write_src_frame(src_frame_path, frame_src_path, args)
copied_src_frames.append(src_frame_path)
counter += 1
@ -401,7 +416,7 @@ def test(test_loader, model_net, criterion, epoch, args, device, multiple, postf
frame_src_path = os.path.join(args.custom_path, args.output, '{:0>8d}.{}'.format(counter, args.img_format))
print(f"LAST S => {frame_src_path}")
src_frame_path = os.path.join(args.custom_path, args.input, last_frame)
shutil.copy(src_frame_path, frame_src_path)
write_src_frame(src_frame_path, frame_src_path, args)
return epoch_save_path

View File

@ -3,7 +3,7 @@ import os, glob, sys, torch, shutil, random, math, time, cv2
import numpy as np
import torch.utils.data as data
import torch.nn as nn
import pandas as pd
#import pandas as pd
import torch.nn.functional as F
from datetime import datetime
from torch.nn import init
@ -184,83 +184,6 @@ def RGBframes_np2Tensor(imgIn, channel):
return imgIn
def make_2D_dataset_X_Train(dir):
    """Collect, for every training sample folder, the sorted list of its .png frames.

    Layout is dir/&lt;scene&gt;/&lt;sample&gt;/*.png; one inner list is produced per
    sample folder.
    """
    # Scenes are the first directory level, samples the second; each sample
    # folder holds one clip's frames.
    framesPath = [
        sorted(glob.glob(os.path.join(sample_path, '*.png')))
        for scene_path in sorted(glob.glob(os.path.join(dir, '*', '')))
        for sample_path in sorted(glob.glob(os.path.join(scene_path, '*', '')))
    ]
    print("The number of total training samples : {} which has 65 frames each.".format(
        len(framesPath)))  ## 4408 folders which have 65 frames each
    return framesPath
class X_Train(data.Dataset):
    """Training dataset over 65-frame clips with random temporal sampling.

    Each item is (frames, t) where frames come from frames_loader_train
    ("np2Tensor [-1,1] normalized") and t is the normalized time of the
    intermediate target frame, shaped (1,) float32.
    """

    def __init__(self, args, max_t_step_size):
        self.args = args
        self.max_t_step_size = max_t_step_size
        self.framesPath = make_2D_dataset_X_Train(self.args.train_data_path)
        self.nScenes = len(self.framesPath)
        # Fail early if train_data_path contained no sample folders.
        if self.nScenes == 0:
            raise RuntimeError("Found 0 files in subfolders of: " + self.args.train_data_path + "\n")

    def __getitem__(self, idx):
        # Random temporal gap between the two anchor frames.
        step = random.randint(2, self.max_t_step_size)
        t_grid = np.linspace(1 / step, 1 - (1 / step), step - 1)
        clip_frames = self.framesPath[idx]
        start = random.randint(0, 64 - step)  # clips hold 65 frames (indices 0..64)
        rel = random.randint(1, step - 1)     # relative index, 1 .. step-1
        mid = start + rel                     # absolute index of the target frame
        t_value = t_grid[rel - 1]             # normalized time in (0, 1)
        if random.randint(0, 1):
            frameRange = [start, start + step, mid]
        else:
            # Temporally reversed order: mirror t accordingly.
            frameRange = [start + step, start, mid]
            t_value = 1.0 - t_value
        frames = frames_loader_train(self.args, clip_frames,
                                     frameRange)  # including "np2Tensor [-1,1] normalized"
        return frames, np.expand_dims(np.array(t_value, dtype=np.float32), 0)

    def __len__(self):
        return self.nScenes
def make_2D_dataset_X_Test(dir, multiple, t_step_size):
    """Build test tuples [I0, I1, It, t, scene] for the X4K test layout.

    Walks dir/&lt;type&gt;/&lt;scene&gt;/ folders, pairs frames t_step_size apart and,
    for each anchor pair, emits (multiple - 1) intermediate targets together
    with their normalized time values (1D, accumulated over all scenes).
    """
    testPath = []
    t = np.linspace(1 / multiple, 1 - (1 / multiple), multiple - 1)
    for type_folder in sorted(glob.glob(os.path.join(dir, '*', ''))):  # [type1,type2,...]
        for scene_folder in sorted(glob.glob(os.path.join(type_folder, '*', ''))):  # [scene1,...]
            frames = sorted(glob.glob(scene_folder + '*.png'))  # e.g. ['00000.png',...,'00032.png']
            scene_rel = scene_folder.split(os.path.join(dir, ''))[-1]  # e.g. type1/scene1
            for idx in range(0, len(frames), t_step_size):  # 0, 32, 64, ...
                if idx == len(frames) - 1:
                    break
                for mul in range(multiple - 1):
                    it_idx = idx + int((t_step_size // multiple) * (mul + 1))
                    testPath.append([
                        frames[idx],                # I0 (fix)
                        frames[idx + t_step_size],  # I1 (fix)
                        frames[it_idx],             # It
                        t[mul],                     # normalized time
                        scene_rel,
                    ])
    return testPath
class X_Test(data.Dataset):
def __init__(self, args, multiple, validation):
self.args = args
self.multiple = multiple
@ -297,44 +220,6 @@ class X_Test(data.Dataset):
return self.nIterations
class Vimeo_Train(data.Dataset):
    """Vimeo triplet training set: anchors (frame0, frame2), target frame1 at t=0.5.

    Each item is (frames, t) where frames come from frames_loader_train
    ("np2Tensor [-1,1] normalized") and t is a constant 0.5, shaped (1,) float32.
    """

    def __init__(self, args):
        self.args = args
        self.t = 0.5
        self.framesPath = []
        # tri_trainlist.txt lists one relative scene path per line,
        # e.g. '00001/0389', under <vimeo_data_path>/sequences/.
        list_path = os.path.join(args.vimeo_data_path, 'tri_trainlist.txt')
        # Bug fix: the original called `f.close` without parentheses, so the
        # handle was never actually closed; `with` closes it reliably.
        with open(list_path, 'r') as f:
            for line in f:
                scene_path = line.split('\n')[0]
                if not scene_path:
                    break  # stop at EOF / first blank line, as before
                frames_list = sorted(glob.glob(os.path.join(
                    args.vimeo_data_path, 'sequences', scene_path,
                    '*.png')))  # '../Datasets/vimeo_triplet/sequences/%05d/%04d/*.png'
                self.framesPath.append(frames_list)
        self.nScenes = len(self.framesPath)
        if self.nScenes == 0:
            raise RuntimeError("Found 0 files in subfolders of: " + args.vimeo_data_path + "\n")
        print("nScenes of Vimeo train triplet : ", self.nScenes)

    def __getitem__(self, idx):
        candidate_frames = self.framesPath[idx]
        # Randomly reverse temporal order for augmentation; the target stays
        # the middle frame and t = 0.5 is symmetric, so t is unchanged.
        if random.randint(0, 1):
            frameRange = [0, 2, 1]
        else:
            frameRange = [2, 0, 1]
        frames = frames_loader_train(self.args, candidate_frames,
                                     frameRange)  # including "np2Tensor [-1,1] normalized"
        return frames, np.expand_dims(np.array(0.5, dtype=np.float32), 0)

    def __len__(self):
        return self.nScenes
class Vimeo_Test(data.Dataset):
def __init__(self, args, validation):
self.args = args
self.framesPath = []
@ -343,7 +228,7 @@ class Vimeo_Test(data.Dataset):
scene_path = f.readline().split('\n')[0]
if not scene_path: break
frames_list = sorted(glob.glob(os.path.join(args.vimeo_data_path, 'sequences', scene_path,
'*.png'))) # '../Datasets/vimeo_triplet/sequences/%05d/%04d/*.png'
'*.*'))) # '../Datasets/vimeo_triplet/sequences/%05d/%04d/*.png'
self.framesPath.append(frames_list)
if validation:
self.framesPath = self.framesPath[::37]
@ -351,7 +236,7 @@ class Vimeo_Test(data.Dataset):
self.num_scene = len(self.framesPath) # total test scenes
if len(self.framesPath) == 0:
raise (RuntimeError("Found 0 files in subfolders of: " + args.vimeo_data_path + "\n"))
raise (RuntimeError("Found no files in subfolders of: " + args.vimeo_data_path + "\n"))
else:
print("# of Vimeo triplet testset : ", self.num_scene)
@ -377,7 +262,7 @@ def make_2D_dataset_Custom_Test(dir, multiple):
testPath = []
t = np.linspace((1 / multiple), (1 - (1 / multiple)), (multiple - 1))
for scene_folder in sorted(glob.glob(os.path.join(dir, '*', ''))): # [scene1, scene2, scene3, ...]
frame_folder = sorted(glob.glob(scene_folder + '*.png')) # ex) ['00000.png',...,'00123.png']
frame_folder = sorted(glob.glob(scene_folder + '*.*')) # ex) ['00000.png',...,'00123.png']
for idx in range(0, len(frame_folder)):
if idx == len(frame_folder) - 1:
break
@ -395,28 +280,6 @@ def make_2D_dataset_Custom_Test(dir, multiple):
return testPath
# def make_2D_dataset_Custom_Test(dir):
# """ make [I0,I1,It,t,scene_folder] """
# """ 1D (accumulated) """
# testPath = []
# for scene_folder in sorted(glob.glob(os.path.join(dir, '*/'))): # [scene1, scene2, scene3, ...]
# frame_folder = sorted(glob.glob(scene_folder + '*.png')) # ex) ['00000.png',...,'00123.png']
# for idx in range(0, len(frame_folder)):
# if idx == len(frame_folder) - 1:
# break
# I0I1It_paths = []
# I0I1It_paths.append(frame_folder[idx]) # I0 (fix)
# I0I1It_paths.append(frame_folder[idx + 1]) # I1 (fix)
# target_t_Idx = frame_folder[idx].split('/')[-1].split('.')[0]+'_x2.png'
# # ex) target t name: 00017.png => '00017_1.png'
# I0I1It_paths.append(os.path.join(scene_folder, target_t_Idx)) # It
# I0I1It_paths.append(0.5) # t
# I0I1It_paths.append(frame_folder[idx].split(os.path.join(dir, ''))[-1].split('/')[0]) # scene1
# testPath.append(I0I1It_paths)
# for asdf in testPath:
# print(asdf)
# return testPath
class Custom_Test(data.Dataset):
def __init__(self, args, multiple):