GitHub Repository: TencentARC/GFPGAN
Path: tests/test_ffhq_degradation_dataset.py
import pytest
import yaml

from gfpgan.data.ffhq_degradation_dataset import FFHQDegradationDataset


def test_ffhq_degradation_dataset():

    with open('tests/data/test_ffhq_degradation_dataset.yml', mode='r') as f:
        opt = yaml.load(f, Loader=yaml.FullLoader)

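    # A hedged sketch (not the actual fixture) of the kind of options the YAML above is
    # expected to provide, inferred from the assertions below; the real
    # tests/data/test_ffhq_degradation_dataset.yml may define additional degradation keys
    # (blur kernel sizes, noise/JPEG ranges, mean/std, ...) and 'out_size' is an assumed name:
    #
    #   dataroot_gt: tests/data/gt
    #   io_backend:
    #     type: disk
    #   out_size: 512            # assumed key for the 512x512 outputs checked below
    #   kernel_list: ['iso', 'aniso']
    #   color_jitter_prob: 1
    #   # color_jitter_pt_prob and gray_prob are also expected (overridden to 0 later)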
    dataset = FFHQDegradationDataset(opt)
    assert dataset.io_backend_opt['type'] == 'disk'  # io backend
    assert len(dataset) == 1  # whether the correct meta info is read
    assert dataset.kernel_list == ['iso', 'aniso']  # correct initialization of the degradation configurations
    assert dataset.color_jitter_prob == 1

    # test __getitem__
    result = dataset.__getitem__(0)
    # check returned keys
    expected_keys = ['gt', 'lq', 'gt_path']
    assert set(expected_keys).issubset(set(result.keys()))
    # check shape and contents
    assert result['gt'].shape == (3, 512, 512)
    assert result['lq'].shape == (3, 512, 512)
    assert result['gt_path'] == 'tests/data/gt/00000000.png'

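    # Note (added for clarity): in the result above, 'gt' is the ground-truth face image
    # and 'lq' its synthetically degraded counterpart; both come back as CHW tensors at
    # 512x512 because the degradation pipeline resizes the degraded image back to the GT size.
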
    # ------------------ test with probability = 0 -------------------- #
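    # Setting these probabilities to 0 disables the random color jitter and grayscale
    # augmentations, so this pass exercises the degradation pipeline without them.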
    opt['color_jitter_prob'] = 0
    opt['color_jitter_pt_prob'] = 0
    opt['gray_prob'] = 0
    opt['io_backend'] = dict(type='disk')
    dataset = FFHQDegradationDataset(opt)
    assert dataset.io_backend_opt['type'] == 'disk'  # io backend
    assert len(dataset) == 1  # whether the correct meta info is read
    assert dataset.kernel_list == ['iso', 'aniso']  # correct initialization of the degradation configurations
    assert dataset.color_jitter_prob == 0

    # test __getitem__
    result = dataset.__getitem__(0)
    # check returned keys
    expected_keys = ['gt', 'lq', 'gt_path']
    assert set(expected_keys).issubset(set(result.keys()))
    # check shape and contents
    assert result['gt'].shape == (3, 512, 512)
    assert result['lq'].shape == (3, 512, 512)
    assert result['gt_path'] == 'tests/data/gt/00000000.png'

    # ------------------ test lmdb backend -------------------- #
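    # With the lmdb backend, samples are addressed by their lmdb keys (listed in the
    # dataset's meta info) rather than filesystem paths, which is why gt_path below
    # comes back as '00000000' instead of a .png path.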
    opt['dataroot_gt'] = 'tests/data/ffhq_gt.lmdb'
    opt['io_backend'] = dict(type='lmdb')

    dataset = FFHQDegradationDataset(opt)
    assert dataset.io_backend_opt['type'] == 'lmdb'  # io backend
    assert len(dataset) == 1  # whether the correct meta info is read
    assert dataset.kernel_list == ['iso', 'aniso']  # correct initialization of the degradation configurations
    assert dataset.color_jitter_prob == 0

    # test __getitem__
    result = dataset.__getitem__(0)
    # check returned keys
    expected_keys = ['gt', 'lq', 'gt_path']
    assert set(expected_keys).issubset(set(result.keys()))
    # check shape and contents
    assert result['gt'].shape == (3, 512, 512)
    assert result['lq'].shape == (3, 512, 512)
    assert result['gt_path'] == '00000000'

    # ------------------ test with crop_components -------------------- #
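    # With crop_components enabled, the dataset additionally returns bounding boxes for
    # the eyes and mouth (loaded from the landmark file below), which the GFPGAN training
    # pipeline uses for its facial component discriminators.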
    opt['crop_components'] = True
    opt['component_path'] = 'tests/data/test_eye_mouth_landmarks.pth'
    opt['eye_enlarge_ratio'] = 1.4
    opt['gt_gray'] = True
    opt['io_backend'] = dict(type='lmdb')

    dataset = FFHQDegradationDataset(opt)
    assert dataset.crop_components is True

    # test __getitem__
    result = dataset.__getitem__(0)
    # check returned keys
    expected_keys = ['gt', 'lq', 'gt_path', 'loc_left_eye', 'loc_right_eye', 'loc_mouth']
    assert set(expected_keys).issubset(set(result.keys()))
    # check shape and contents
    assert result['gt'].shape == (3, 512, 512)
    assert result['lq'].shape == (3, 512, 512)
    assert result['gt_path'] == '00000000'
    assert result['loc_left_eye'].shape == (4, )
    assert result['loc_right_eye'].shape == (4, )
    assert result['loc_mouth'].shape == (4, )

    # ------------------ lmdb backend should have paths ending with lmdb -------------------- #
    with pytest.raises(ValueError):
        opt['dataroot_gt'] = 'tests/data/gt'
        opt['io_backend'] = dict(type='lmdb')
        dataset = FFHQDegradationDataset(opt)
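

# Hedged usage sketch (not part of the upstream test): FFHQDegradationDataset is a regular
# torch Dataset, so it can be wrapped in a DataLoader to iterate degraded/ground-truth pairs
# with the same test config. To run the test itself: pytest tests/test_ffhq_degradation_dataset.py -q
if __name__ == '__main__':
    from torch.utils.data import DataLoader

    with open('tests/data/test_ffhq_degradation_dataset.yml', mode='r') as f:
        opt = yaml.load(f, Loader=yaml.FullLoader)
    loader = DataLoader(FFHQDegradationDataset(opt), batch_size=1, shuffle=False)
    for batch in loader:
        # default collation batches the tensors: gt/lq become (1, 3, 512, 512)
        print(batch['gt_path'], batch['gt'].shape, batch['lq'].shape)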