Upload 49 files
- .gitattributes +5 -0
- Figs/Params_Time.png +3 -0
- Figs/Pipeline.png +3 -0
- Figs/RGBDD.png +3 -0
- LICENSE +201 -0
- README.md +137 -14
- app.py +156 -0
- checkpoints/NYU_T_X16.pth +3 -0
- checkpoints/NYU_T_X4.pth +3 -0
- checkpoints/NYU_T_X8.pth +3 -0
- checkpoints/NYU_X16.pth +3 -0
- checkpoints/NYU_X4.pth +3 -0
- checkpoints/NYU_X8.pth +3 -0
- checkpoints/RGBDD.pth +3 -0
- checkpoints/RGBDD_Noisy.pth +3 -0
- checkpoints/RGBDD_Noisy_T.pth +3 -0
- checkpoints/RGBDD_T.pth +3 -0
- checkpoints/TOFDSR.pth +3 -0
- checkpoints/TOFDSR_Noisy.pth +3 -0
- checkpoints/TOFDSR_Noisy_T.pth +3 -0
- checkpoints/TOFDSR_T.pth +3 -0
- data/TOFDSR_Test.txt +560 -0
- data/TOFDSR_Train.txt +0 -0
- data/__pycache__/nyu_dataloader.cpython-311.pyc +0 -0
- data/__pycache__/rgbdd_dataloader.cpython-311.pyc +0 -0
- data/nyu_dataloader.py +47 -0
- data/rgbdd_dataloader.py +118 -0
- data/tofdc_dataloader.py +90 -0
- examples/RGB-D-D/20200518160957_LR_fill_depth.png +0 -0
- examples/RGB-D-D/20200518160957_RGB.jpg +0 -0
- examples/TOFDSR/2020_09_08_13_59_59_435_rgb_depth_crop_fill.png +0 -0
- examples/TOFDSR/2020_09_08_13_59_59_435_rgb_rgb_crop.png +3 -0
- net/CR.py +63 -0
- net/__pycache__/CR.cpython-311.pyc +0 -0
- net/__pycache__/deform_conv.cpython-311.pyc +0 -0
- net/__pycache__/dornet.cpython-311.pyc +0 -0
- net/__pycache__/dornet_ddp.cpython-311.pyc +0 -0
- net/deform_conv.py +75 -0
- net/dornet.py +586 -0
- net/dornet_ddp.py +600 -0
- test_img.py +17 -0
- test_img/RGB-D-D/20200518160957_LR_fill_depth.png +0 -0
- test_img/RGB-D-D/20200518160957_RGB.jpg +0 -0
- test_img/TOFDSR/2020_09_08_13_59_59_435_rgb_depth_crop_fill.png +0 -0
- test_img/TOFDSR/2020_09_08_13_59_59_435_rgb_rgb_crop.png +3 -0
- test_nyu_rgbdd.py +103 -0
- test_tofdsr.py +66 -0
- train_nyu_rgbdd.py +158 -0
- train_tofdsr.py +167 -0
- utils.py +37 -0
.gitattributes
CHANGED
@@ -33,3 +33,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+examples/TOFDSR/2020_09_08_13_59_59_435_rgb_rgb_crop.png filter=lfs diff=lfs merge=lfs -text
+Figs/Params_Time.png filter=lfs diff=lfs merge=lfs -text
+Figs/Pipeline.png filter=lfs diff=lfs merge=lfs -text
+Figs/RGBDD.png filter=lfs diff=lfs merge=lfs -text
+test_img/TOFDSR/2020_09_08_13_59_59_435_rgb_rgb_crop.png filter=lfs diff=lfs merge=lfs -text
Figs/Params_Time.png
ADDED
(binary image, tracked with Git LFS)
Figs/Pipeline.png
ADDED
(binary image, tracked with Git LFS)
Figs/RGBDD.png
ADDED
(binary image, tracked with Git LFS)
LICENSE
ADDED
@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

[Sections 1–9 of the standard Apache License 2.0 text: Definitions; Grant of Copyright License; Grant of Patent License; Redistribution; Submission of Contributions; Trademarks; Disclaimer of Warranty; Limitation of Liability; Accepting Warranty or Additional Liability.]

END OF TERMS AND CONDITIONS

APPENDIX: How to apply the Apache License to your work.

To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives.

Copyright [yyyy] [name of copyright owner]

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
README.md
CHANGED
@@ -1,14 +1,137 @@
<p align="center">
<h3 align="center"> DORNet: A Degradation Oriented and Regularized Network for <br> Blind Depth Super-Resolution
<br>
:star2: CVPR 2025 (Oral Presentation) :star2:
</h3>

<p align="center"><a href="https://scholar.google.com/citations?user=VogTuQkAAAAJ&hl=zh-CN">Zhengxue Wang</a><sup>1*</sup>,
<a href="https://yanzq95.github.io/">Zhiqiang Yan✉</a><sup>1*</sup>,
<a href="https://jspan.github.io/">Jinshan Pan</a><sup>1</sup>,
<a href="https://guangweigao.github.io/">Guangwei Gao</a><sup>2</sup>,
<a href="https://cszn.github.io/">Kai Zhang</a><sup>3</sup>,
<a href="https://scholar.google.com/citations?user=6CIDtZQAAAAJ&hl=zh-CN">Jian Yang✉</a><sup>1</sup> <!--‡-->
</p>

<p align="center">
<sup>*</sup>Equal contribution
<sup>✉</sup>Corresponding author <br>
<sup>1</sup>Nanjing University of Science and Technology
<br>
<sup>2</sup>Nanjing University of Posts and Telecommunications
<sup>3</sup>Nanjing University
</p>

<p align="center">
<img src="Figs/Pipeline.png" width="800"/>
</p>

Overview of DORNet. Given $\boldsymbol D_{up}$ as input, the degradation learning first encodes it to produce degradation representations $\boldsymbol{\tilde{D}}$ and $\boldsymbol D$. Then, $\boldsymbol{\tilde{D}}$, $\boldsymbol D$, $\boldsymbol D_{lr}$, and $\boldsymbol I_{r}$ are fed into multiple degradation-oriented feature transformation (DOFT) modules, generating the HR depth $\boldsymbol D_{hr}$. Finally, $\boldsymbol D$ and $\boldsymbol D_{hr}$ are sent to the degradation regularization to obtain $\boldsymbol D_{d}$, which is used as input for the degradation loss $\mathcal L_{deg}$ and the contrastive loss $\mathcal L_{cont}$. The degradation regularization is applied only during training and adds no extra overhead at test time.
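To make the data flow above concrete, here is a minimal PyTorch sketch of the pipeline. Every module below (`DegradationEncoder`, `DOFTBlock`, the toy regularizer) is an illustrative placeholder with made-up layer choices, not the actual implementation in `net/dornet.py`; it only mirrors the degradation-conditioned modulation idea.

```python
import torch
import torch.nn as nn

# Illustrative placeholders only -- NOT the real modules in net/dornet.py.

class DegradationEncoder(nn.Module):
    """Degradation learning: encode D_up into a spatial map D_tilde and a global code D."""
    def __init__(self, ch=32):
        super().__init__()
        self.body = nn.Sequential(nn.Conv2d(1, ch, 3, padding=1), nn.ReLU(),
                                  nn.Conv2d(ch, ch, 3, padding=1))
        self.pool = nn.AdaptiveAvgPool2d(1)

    def forward(self, d_up):
        d_tilde = self.body(d_up)               # spatial degradation representation
        d_code = self.pool(d_tilde).flatten(1)  # global degradation code
        return d_tilde, d_code


class DOFTBlock(nn.Module):
    """Degradation-oriented feature transformation (placeholder): depth features are
    modulated by the degradation code and fused with RGB guidance."""
    def __init__(self, ch=32):
        super().__init__()
        self.scale = nn.Linear(ch, ch)
        self.shift = nn.Linear(ch, ch)
        self.fuse = nn.Conv2d(2 * ch, ch, 3, padding=1)

    def forward(self, feat_d, feat_rgb, d_code):
        s = self.scale(d_code)[:, :, None, None]
        b = self.shift(d_code)[:, :, None, None]
        return self.fuse(torch.cat([feat_d * (1 + s) + b, feat_rgb], dim=1))


class ToyDORNet(nn.Module):
    def __init__(self, ch=32, n_blocks=3):
        super().__init__()
        self.deg = DegradationEncoder(ch)
        self.depth_in = nn.Conv2d(1, ch, 3, padding=1)
        self.rgb_in = nn.Conv2d(3, ch, 3, padding=1)
        self.blocks = nn.ModuleList(DOFTBlock(ch) for _ in range(n_blocks))
        self.out = nn.Conv2d(ch, 1, 3, padding=1)
        # Toy stand-in for the degradation regularization: re-degrades D_hr using the
        # learned representation; in DORNet it only feeds L_deg and L_cont during training.
        self.regularize = nn.Conv2d(1 + ch, 1, 3, padding=1)

    def forward(self, d_up, rgb):
        d_tilde, d_code = self.deg(d_up)
        feat_d, feat_rgb = self.depth_in(d_up), self.rgb_in(rgb)
        for blk in self.blocks:
            feat_d = blk(feat_d, feat_rgb, d_code)
        d_hr = self.out(feat_d)                                    # HR depth prediction
        d_d = self.regularize(torch.cat([d_hr, d_tilde], dim=1))   # training-time only
        return d_hr, d_d


if __name__ == "__main__":
    d_up = torch.rand(1, 1, 64, 64)   # bicubically upsampled LR depth
    rgb = torch.rand(1, 3, 64, 64)
    d_hr, d_d = ToyDORNet()(d_up, rgb)
    print(d_hr.shape, d_d.shape)
```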

## Dependencies

```bash
Python==3.11.5
PyTorch==2.1.0
numpy==1.23.5
torchvision==0.16.0
scipy==1.11.3
Pillow==10.0.1
tqdm==4.65.0
scikit-image==0.21.0
mmcv-full==1.7.2
```
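A quick way to confirm an installed environment matches the pins above (purely a convenience sketch, not a script shipped in this repository):

```python
# Convenience check of the pinned environment (assumes the packages are installed).
import torch, torchvision, numpy, scipy, PIL, skimage

expected = {
    "torch": "2.1.0", "torchvision": "0.16.0", "numpy": "1.23.5",
    "scipy": "1.11.3", "Pillow": "10.0.1", "scikit-image": "0.21.0",
}
found = {
    "torch": torch.__version__, "torchvision": torchvision.__version__,
    "numpy": numpy.__version__, "scipy": scipy.__version__,
    "Pillow": PIL.__version__, "scikit-image": skimage.__version__,
}
for name, want in expected.items():
    flag = "OK" if found[name].startswith(want) else f"!= {want}"
    print(f"{name:13s} {found[name]:12s} {flag}")
print("CUDA available:", torch.cuda.is_available())
```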

## Datasets

[RGB-D-D](https://github.com/lingzhi96/RGB-D-D-Dataset)

[TOFDSR](https://yanzq95.github.io/projectpage/TOFDC/index.html)

[NYU-v2](https://cs.nyu.edu/~fergus/datasets/nyu_depth_v2.html)

## Models

Pretrained models can be found in <a href="https://github.com/yanzq95/DORNet/tree/main/checkpoints">checkpoints</a>.
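A minimal loading sketch, mirroring what `app.py` in this commit does for the RGB-D-D weights (swap the checkpoint path for the other variants; presumably `tiny_model=True` goes with the `*_T.pth` files):

```python
import torch
from net.dornet import Net

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
net = Net(tiny_model=False).to(device)   # tiny_model=True for the DORNet-T checkpoints (assumption)
net.load_state_dict(torch.load("./checkpoints/RGBDD.pth", map_location=device))
net.eval()
```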

## Training

For the RGB-D-D and NYU-v2 datasets, we use a single GPU to train our DORNet. For the larger TOFDC dataset, we employ multiple GPUs to accelerate training.

### DORNet
```
Train on real-world RGB-D-D
> python train_nyu_rgbdd.py
Train on real-world TOFDSR
> python -m torch.distributed.launch --nproc_per_node 2 train_tofdsr.py --result_root 'experiment/TOFDSR'
Train on synthetic NYU-v2
> python train_nyu_rgbdd.py
```

### DORNet-T
```
Train on real-world RGB-D-D
> python train_nyu_rgbdd.py --tiny_model
Train on real-world TOFDSR
> python -m torch.distributed.launch --nproc_per_node 2 train_tofdsr.py --result_root 'experiment/TOFDSR_T' --tiny_model
Train on synthetic NYU-v2
> python train_nyu_rgbdd.py --tiny_model
```

## Testing

### DORNet
```
Test on real-world RGB-D-D
> python test_nyu_rgbdd.py
Test on real-world TOFDSR
> python test_tofdsr.py
Test on synthetic NYU-v2
> python test_nyu_rgbdd.py
```

### DORNet-T
```
Test on real-world RGB-D-D
> python test_nyu_rgbdd.py --tiny_model
Test on real-world TOFDSR
> python test_tofdsr.py --tiny_model
Test on synthetic NYU-v2
> python test_nyu_rgbdd.py --tiny_model
```

## Experiments

### Quantitative comparison

<p align="center">
<img src="Figs/Params_Time.png" width="500"/>
<br>
Complexity on RGB-D-D (w/o Noise), tested on a 4090 GPU. A larger circle diameter indicates a higher inference time.
</p>
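For context, per-image inference time of this kind is usually measured with CUDA events after a warm-up phase. Below is a minimal sketch; the input resolution and iteration counts are arbitrary placeholders, not the protocol used for the figure above.

```python
import torch
from net.dornet import Net

device = torch.device("cuda:0")
net = Net(tiny_model=False).to(device).eval()
lr = torch.rand(1, 1, 192, 256, device=device)    # placeholder input resolution
rgb = torch.rand(1, 3, 192, 256, device=device)

start = torch.cuda.Event(enable_timing=True)
end = torch.cuda.Event(enable_timing=True)
with torch.no_grad():
    for _ in range(10):                            # warm-up
        net(x_query=lr, rgb=rgb)
    torch.cuda.synchronize()
    start.record()
    for _ in range(100):
        net(x_query=lr, rgb=rgb)
    end.record()
    torch.cuda.synchronize()

print("avg inference: %.2f ms" % (start.elapsed_time(end) / 100))
print("params: %.2f M" % (sum(p.numel() for p in net.parameters()) / 1e6))
```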

### Visual comparison

<p align="center">
<img src="Figs/RGBDD.png" width="1000"/>
<br>
Visual results on the real-world RGB-D-D dataset (w/o Noise).
</p>

## Citation

If you find our method useful, please consider citing:
```
@inproceedings{wang2025dornet,
  title={DORNet: A Degradation Oriented and Regularized Network for Blind Depth Super-Resolution},
  author={Wang, Zhengxue and Yan, Zhiqiang and Pan, Jinshan and Gao, Guangwei and Zhang, Kai and Yang, Jian},
  booktitle={Proceedings of the Computer Vision and Pattern Recognition Conference},
  pages={15813--15822},
  year={2025}
}
```
app.py
ADDED
@@ -0,0 +1,156 @@
import gradio as gr
import numpy as np
import torch
import os
import cv2
from PIL import Image
import torchvision.transforms as transforms
from net.dornet import Net
from net.dornet_ddp import Net_ddp

# init
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
net = Net(tiny_model=False).to(device)
model_ckpt_map = {
    "RGB-D-D": "./checkpoints/RGBDD.pth",
    "TOFDSR": "./checkpoints/TOFDSR.pth"
}

# load model
def load_model(model_type: str):
    global net
    ckpt_path = model_ckpt_map[model_type]
    print(f"Loading weights from: {ckpt_path}")
    if model_type == "RGB-D-D":
        net = Net(tiny_model=False).to(device)
    elif model_type == "TOFDSR":
        net = Net_ddp(tiny_model=False).srn.to(device)
    else:
        raise ValueError(f"Unknown model_type: {model_type}")

    net.load_state_dict(torch.load(ckpt_path, map_location=device))
    net.eval()

load_model("RGB-D-D")


# data processing
def preprocess_inputs(rgb_image: Image.Image, lr_depth: Image.Image):
    image = np.array(rgb_image.convert("RGB")).astype(np.float32)
    h, w, _ = image.shape
    lr = np.array(lr_depth.resize((w, h), Image.BICUBIC)).astype(np.float32)
    # Normalize depth
    max_out, min_out = 5000.0, 0.0
    lr = (lr - min_out) / (max_out - min_out)
    # Normalize RGB
    maxx, minn = np.max(image), np.min(image)
    image = (image - minn) / (maxx - minn)
    # To tensor
    data_transform = transforms.Compose([transforms.ToTensor()])
    image = data_transform(image).float()
    lr = data_transform(np.expand_dims(lr, 2)).float()
    # Add batch dimension
    lr = lr.unsqueeze(0).to(device)
    image = image.unsqueeze(0).to(device)
    return image, lr, min_out, max_out


# model inference
@torch.no_grad()
def infer(rgb_image: Image.Image, lr_depth: Image.Image, model_type: str):
    load_model(model_type)  # reset weights for the selected model

    image, lr, min_out, max_out = preprocess_inputs(rgb_image, lr_depth)

    if model_type == "RGB-D-D":
        out = net(x_query=lr, rgb=image)
    elif model_type == "TOFDSR":
        out, _ = net(x_query=lr, rgb=image)

    pred = out[0, 0] * (max_out - min_out) + min_out
    pred = pred.cpu().numpy().astype(np.uint16)
    # raw 16-bit depth map (original value range)
    pred_gray = Image.fromarray(pred)

    # color-mapped visualization
    pred_norm = (pred - np.min(pred)) / (np.max(pred) - np.min(pred)) * 255
    pred_vis = pred_norm.astype(np.uint8)
    pred_heat = cv2.applyColorMap(pred_vis, cv2.COLORMAP_PLASMA)
    pred_heat = cv2.cvtColor(pred_heat, cv2.COLOR_BGR2RGB)
    return pred_gray, Image.fromarray(pred_heat)


# Gradio UI
Intro = """
## DORNet: A Degradation Oriented and Regularized Network for Blind Depth Super-Resolution
[📄 Paper](https://arxiv.org/pdf/2410.11666) • [💻 Code](https://github.com/yanzq95/DORNet) • [📦 Model](https://huggingface.co/wzxwyx/DORNet/tree/main)
"""

with gr.Blocks(css="""
.output-image {
    display: flex;
    justify-content: center;
    align-items: center;
}
.output-image img {
    margin: auto;
    display: block;
}
""") as demo:
    gr.Markdown(Intro)

    with gr.Row():
        with gr.Column():
            rgb_input = gr.Image(label="RGB Image", type="pil")
            lr_input = gr.Image(label="Low-res Depth", type="pil", image_mode="I")
        with gr.Column():
            output1 = gr.Image(label="DORNet Output", type="pil", elem_classes=["output-image"])
            output2 = gr.Image(label="Normalized Output", type="pil", elem_classes=["output-image"])

    model_selector = gr.Dropdown(choices=["RGB-D-D", "TOFDSR"], label="Model Type", value="RGB-D-D")
    run_button = gr.Button("Run Inference")

    gr.Examples(
        examples=[
            ["examples/RGB-D-D/20200518160957_RGB.jpg", "examples/RGB-D-D/20200518160957_LR_fill_depth.png", "RGB-D-D"],
            ["examples/TOFDSR/2020_09_08_13_59_59_435_rgb_rgb_crop.png", "examples/TOFDSR/2020_09_08_13_59_59_435_rgb_depth_crop_fill.png", "TOFDSR"],
        ],
        inputs=[rgb_input, lr_input, model_selector],
        outputs=[output1, output2],
        label="Try Examples ↓"
    )

    run_button.click(fn=infer, inputs=[rgb_input, lr_input, model_selector], outputs=[output1, output2])

demo.launch(share=True)
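The same inference path can also be driven without the Gradio UI. Below is a standalone sketch that reproduces the load/preprocess/predict steps of app.py on the bundled RGB-D-D example pair; it assumes the repository layout of this commit and is not one of the committed files.

```python
# Standalone sketch of the app.py inference path (no Gradio UI); assumes this commit's layout.
import numpy as np
import torch
from PIL import Image
import torchvision.transforms as transforms
from net.dornet import Net

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
net = Net(tiny_model=False).to(device)
net.load_state_dict(torch.load("./checkpoints/RGBDD.pth", map_location=device))
net.eval()

rgb = Image.open("examples/RGB-D-D/20200518160957_RGB.jpg").convert("RGB")
lr_depth = Image.open("examples/RGB-D-D/20200518160957_LR_fill_depth.png")

image = np.array(rgb).astype(np.float32)
h, w, _ = image.shape
lr = np.array(lr_depth.resize((w, h), Image.BICUBIC)).astype(np.float32)
max_out, min_out = 5000.0, 0.0                       # same depth normalization as app.py
lr = (lr - min_out) / (max_out - min_out)
image = (image - image.min()) / (image.max() - image.min())

to_tensor = transforms.ToTensor()
lr_t = to_tensor(np.expand_dims(lr, 2)).float().unsqueeze(0).to(device)
img_t = to_tensor(image).float().unsqueeze(0).to(device)

with torch.no_grad():
    out = net(x_query=lr_t, rgb=img_t)

pred = (out[0, 0] * (max_out - min_out) + min_out).cpu().numpy().astype(np.uint16)
Image.fromarray(pred).save("pred_depth.png")         # 16-bit depth in the original value range
```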
checkpoints/NYU_T_X16.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3613d2cbd652990525d915972e1c24a885832d3196a929e13aa40833a13f1df8
size 1957834
checkpoints/NYU_T_X4.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f91a4b63870363483c189201196d1625b313fd45b8ed4935d3a55696fdbe7234
size 1957834
checkpoints/NYU_T_X8.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6cbd299ec3a589c23ded86a9c956cea7d70e7e3593ca3593337836b1e84b36f8
size 1958239
checkpoints/NYU_X16.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:00b68485b244b7579e0b471b8a2d7946493c320eedf000e1cbd334ca8ad55d79
size 12359114
checkpoints/NYU_X4.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9be0f4a053b724dbbbd140db4a32f32973cb01e97edd47c1100053797bebe262
size 12359114
checkpoints/NYU_X8.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:26b1d4f972aefc56e74d427345a7ed9a6b61af8319ac4a635af3d7d687a2c1e6
size 12359519
checkpoints/RGBDD.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c4c8aed8c42a292dc5a852f760cab4d4ecb7d5aceb548671a2a8bc31d7b57aa5
size 12359519
checkpoints/RGBDD_Noisy.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d1e6994abbf02b8a03de789511580bd0a5e7219f4d9ffa9e84a5a3f6ffefd79f
size 12359519
checkpoints/RGBDD_Noisy_T.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:626c55ff559f9d6ce6e3f4229c421de467b5d03c9c27f8bb871c40597a21c878
size 1958239
checkpoints/RGBDD_T.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:38bd20f150d138e395a66cf0a981f6e0e840a995dc4866a6c8b1cadaf5494180
size 1958239
checkpoints/TOFDSR.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:687f072ac89e88c47c1248db0b3e52c487fe0d198be546b5348038b6b3369e49
size 11806363
checkpoints/TOFDSR_Noisy.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cabe16fe9593dede63883af64678496ab92a83d3271ef0e23e8471574ce4e1cf
size 11806363
checkpoints/TOFDSR_Noisy_T.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7ac5209fa1e013ab14cc4ad973cc01264549612b5f3361f31274efb9aade88be
size 1849078
checkpoints/TOFDSR_T.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:68e1955b37405cb209ce1b7a8decca81169f7280e58c151c44525c6da902cdbd
size 1850267
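The .pth entries above are Git LFS pointer files; the binary weights resolve when the repository is cloned with Git LFS installed. If the Hugging Face model repo linked from app.py mirrors these files, a programmatic download might look like the sketch below; both the repo id and the filename are unverified assumptions.

```python
# Sketch only: repo id and filename are assumptions, not confirmed by this commit.
from huggingface_hub import hf_hub_download

ckpt_path = hf_hub_download(repo_id="wzxwyx/DORNet", filename="RGBDD.pth")
print(ckpt_path)
```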
data/TOFDSR_Test.txt
ADDED
@@ -0,0 +1,560 @@
1 |
+
TOFDC_split/Test/RGB/20200716_104505_rgb_crop.png,TOFDC_split/Test/GT/20200716_104505_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200716_104505_depth_crop_fill.png
|
2 |
+
TOFDC_split/Test/RGB/20200717_100634_rgb_crop.png,TOFDC_split/Test/GT/20200717_100634_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_100634_depth_crop_fill.png
|
3 |
+
TOFDC_split/Test/RGB/20200818_120711_rgb_crop.png,TOFDC_split/Test/GT/20200818_120711_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_120711_depth_crop_fill.png
|
4 |
+
TOFDC_split/Test/RGB/20200603_123554_rgb_crop.png,TOFDC_split/Test/GT/20200603_123554_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_123554_depth_crop_fill.png
|
5 |
+
TOFDC_split/Test/RGB/2020_09_21_21_29_46_695_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_21_29_46_695_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_21_29_46_695_rgb_depth_crop_fill.png
|
6 |
+
TOFDC_split/Test/RGB/20200818_105105_rgb_crop.png,TOFDC_split/Test/GT/20200818_105105_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_105105_depth_crop_fill.png
|
7 |
+
TOFDC_split/Test/RGB/20200818_152310_rgb_crop.png,TOFDC_split/Test/GT/20200818_152310_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_152310_depth_crop_fill.png
|
8 |
+
TOFDC_split/Test/RGB/20200722_103518_rgb_crop.png,TOFDC_split/Test/GT/20200722_103518_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_103518_depth_crop_fill.png
|
9 |
+
TOFDC_split/Test/RGB/20200721_100558_rgb_crop.png,TOFDC_split/Test/GT/20200721_100558_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_100558_depth_crop_fill.png
|
10 |
+
TOFDC_split/Test/RGB/20200722_102852_rgb_crop.png,TOFDC_split/Test/GT/20200722_102852_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_102852_depth_crop_fill.png
|
11 |
+
TOFDC_split/Test/RGB/20200920_112932_rgb_crop.png,TOFDC_split/Test/GT/20200920_112932_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_112932_depth_crop_fill.png
|
12 |
+
TOFDC_split/Test/RGB/20200831_111743_rgb_crop.png,TOFDC_split/Test/GT/20200831_111743_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200831_111743_depth_crop_fill.png
|
13 |
+
TOFDC_split/Test/RGB/20200722_153225_rgb_crop.png,TOFDC_split/Test/GT/20200722_153225_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_153225_depth_crop_fill.png
|
14 |
+
TOFDC_split/Test/RGB/20200921_140327_rgb_crop.png,TOFDC_split/Test/GT/20200921_140327_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200921_140327_depth_crop_fill.png
|
15 |
+
TOFDC_split/Test/RGB/20200726_102650_rgb_crop.png,TOFDC_split/Test/GT/20200726_102650_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200726_102650_depth_crop_fill.png
|
16 |
+
TOFDC_split/Test/RGB/20200604_152722_rgb_crop.png,TOFDC_split/Test/GT/20200604_152722_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200604_152722_depth_crop_fill.png
|
17 |
+
TOFDC_split/Test/RGB/20200927_202052_rgb_crop.png,TOFDC_split/Test/GT/20200927_202052_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_202052_depth_crop_fill.png
|
18 |
+
TOFDC_split/Test/RGB/20200818_114303_rgb_crop.png,TOFDC_split/Test/GT/20200818_114303_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_114303_depth_crop_fill.png
|
19 |
+
TOFDC_split/Test/RGB/20200820_170902_rgb_crop.png,TOFDC_split/Test/GT/20200820_170902_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_170902_depth_crop_fill.png
|
20 |
+
TOFDC_split/Test/RGB/20200927_194454_rgb_crop.png,TOFDC_split/Test/GT/20200927_194454_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_194454_depth_crop_fill.png
|
21 |
+
TOFDC_split/Test/RGB/2020_09_21_21_50_54_065_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_21_50_54_065_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_21_50_54_065_rgb_depth_crop_fill.png
|
22 |
+
TOFDC_split/Test/RGB/20200927_191907_rgb_crop.png,TOFDC_split/Test/GT/20200927_191907_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_191907_depth_crop_fill.png
|
23 |
+
TOFDC_split/Test/RGB/20200919_190817_rgb_crop.png,TOFDC_split/Test/GT/20200919_190817_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_190817_depth_crop_fill.png
|
24 |
+
TOFDC_split/Test/RGB/20200719_194950_rgb_crop.png,TOFDC_split/Test/GT/20200719_194950_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_194950_depth_crop_fill.png
|
25 |
+
TOFDC_split/Test/RGB/2020_09_11_22_16_01_836_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_22_16_01_836_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_22_16_01_836_rgb_depth_crop_fill.png
|
26 |
+
TOFDC_split/Test/RGB/20200818_155908_rgb_crop.png,TOFDC_split/Test/GT/20200818_155908_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_155908_depth_crop_fill.png
|
27 |
+
TOFDC_split/Test/RGB/20200721_201000_rgb_crop.png,TOFDC_split/Test/GT/20200721_201000_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_201000_depth_crop_fill.png
|
28 |
+
TOFDC_split/Test/RGB/2020_09_09_11_19_02_530_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_11_19_02_530_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_11_19_02_530_rgb_depth_crop_fill.png
|
29 |
+
TOFDC_split/Test/RGB/2020_09_08_18_00_27_005_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_18_00_27_005_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_18_00_27_005_rgb_depth_crop_fill.png
|
30 |
+
TOFDC_split/Test/RGB/2020_09_22_15_03_30_825_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_15_03_30_825_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_15_03_30_825_rgb_depth_crop_fill.png
|
31 |
+
TOFDC_split/Test/RGB/2020_09_10_11_09_19_261_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_11_09_19_261_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_11_09_19_261_rgb_depth_crop_fill.png
|
32 |
+
TOFDC_split/Test/RGB/20200719_175258_rgb_crop.png,TOFDC_split/Test/GT/20200719_175258_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_175258_depth_crop_fill.png
|
33 |
+
TOFDC_split/Test/RGB/2020_09_11_22_28_43_143_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_22_28_43_143_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_22_28_43_143_rgb_depth_crop_fill.png
|
34 |
+
TOFDC_split/Test/RGB/20200819_183942_rgb_crop.png,TOFDC_split/Test/GT/20200819_183942_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_183942_depth_crop_fill.png
|
35 |
+
TOFDC_split/Test/RGB/2020_09_11_10_41_16_143_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_10_41_16_143_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_10_41_16_143_rgb_depth_crop_fill.png
|
36 |
+
TOFDC_split/Test/RGB/20200725_154850_rgb_crop.png,TOFDC_split/Test/GT/20200725_154850_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_154850_depth_crop_fill.png
|
37 |
+
TOFDC_split/Test/RGB/20200818_155243_rgb_crop.png,TOFDC_split/Test/GT/20200818_155243_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_155243_depth_crop_fill.png
|
38 |
+
TOFDC_split/Test/RGB/20200824_143942_rgb_crop.png,TOFDC_split/Test/GT/20200824_143942_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_143942_depth_crop_fill.png
|
39 |
+
TOFDC_split/Test/RGB/2020_09_14_19_53_27_896_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_14_19_53_27_896_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_14_19_53_27_896_rgb_depth_crop_fill.png
|
40 |
+
TOFDC_split/Test/RGB/20200719_154136_rgb_crop.png,TOFDC_split/Test/GT/20200719_154136_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_154136_depth_crop_fill.png
|
41 |
+
TOFDC_split/Test/RGB/20200919_111906_rgb_crop.png,TOFDC_split/Test/GT/20200919_111906_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_111906_depth_crop_fill.png
|
42 |
+
TOFDC_split/Test/RGB/20200606_145744_rgb_crop.png,TOFDC_split/Test/GT/20200606_145744_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200606_145744_depth_crop_fill.png
|
43 |
+
TOFDC_split/Test/RGB/20200928_152622_rgb_crop.png,TOFDC_split/Test/GT/20200928_152622_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_152622_depth_crop_fill.png
|
44 |
+
TOFDC_split/Test/RGB/20200928_152109_rgb_crop.png,TOFDC_split/Test/GT/20200928_152109_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_152109_depth_crop_fill.png
|
45 |
+
TOFDC_split/Test/RGB/20200718_103458_rgb_crop.png,TOFDC_split/Test/GT/20200718_103458_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_103458_depth_crop_fill.png
|
46 |
+
TOFDC_split/Test/RGB/20200819_141832_rgb_crop.png,TOFDC_split/Test/GT/20200819_141832_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_141832_depth_crop_fill.png
|
47 |
+
TOFDC_split/Test/RGB/20200719_152930_rgb_crop.png,TOFDC_split/Test/GT/20200719_152930_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_152930_depth_crop_fill.png
|
48 |
+
TOFDC_split/Test/RGB/20200818_114851_rgb_crop.png,TOFDC_split/Test/GT/20200818_114851_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_114851_depth_crop_fill.png
|
49 |
+
TOFDC_split/Test/RGB/20200719_105752_rgb_crop.png,TOFDC_split/Test/GT/20200719_105752_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_105752_depth_crop_fill.png
|
50 |
+
TOFDC_split/Test/RGB/20200818_123419_rgb_crop.png,TOFDC_split/Test/GT/20200818_123419_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_123419_depth_crop_fill.png
|
51 |
+
TOFDC_split/Test/RGB/2020_09_11_11_11_15_515_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_11_11_15_515_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_11_11_15_515_rgb_depth_crop_fill.png
|
52 |
+
TOFDC_split/Test/RGB/20200725_161537_rgb_crop.png,TOFDC_split/Test/GT/20200725_161537_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_161537_depth_crop_fill.png
|
53 |
+
TOFDC_split/Test/RGB/20200820_164628_rgb_crop.png,TOFDC_split/Test/GT/20200820_164628_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_164628_depth_crop_fill.png
|
54 |
+
TOFDC_split/Test/RGB/20200927_152821_rgb_crop.png,TOFDC_split/Test/GT/20200927_152821_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_152821_depth_crop_fill.png
|
55 |
+
TOFDC_split/Test/RGB/20200824_121012_rgb_crop.png,TOFDC_split/Test/GT/20200824_121012_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_121012_depth_crop_fill.png
|
56 |
+
TOFDC_split/Test/RGB/2020_09_21_20_42_08_839_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_20_42_08_839_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_20_42_08_839_rgb_depth_crop_fill.png
|
57 |
+
TOFDC_split/Test/RGB/2020_09_09_11_26_16_634_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_11_26_16_634_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_11_26_16_634_rgb_depth_crop_fill.png
|
58 |
+
TOFDC_split/Test/RGB/2020_09_09_11_57_42_136_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_11_57_42_136_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_11_57_42_136_rgb_depth_crop_fill.png
|
59 |
+
TOFDC_split/Test/RGB/2020_09_26_11_40_36_466_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_11_40_36_466_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_11_40_36_466_rgb_depth_crop_fill.png
|
60 |
+
TOFDC_split/Test/RGB/2020_09_08_15_55_37_230_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_15_55_37_230_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_15_55_37_230_rgb_depth_crop_fill.png
|
61 |
+
TOFDC_split/Test/RGB/2020_09_12_20_59_31_547_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_20_59_31_547_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_20_59_31_547_rgb_depth_crop_fill.png
|
62 |
+
TOFDC_split/Test/RGB/20200603_134220_rgb_crop.png,TOFDC_split/Test/GT/20200603_134220_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_134220_depth_crop_fill.png
|
63 |
+
TOFDC_split/Test/RGB/2020_09_21_20_13_06_978_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_20_13_06_978_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_20_13_06_978_rgb_depth_crop_fill.png
|
64 |
+
TOFDC_split/Test/RGB/2020_09_12_16_08_56_136_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_16_08_56_136_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_16_08_56_136_rgb_depth_crop_fill.png
|
65 |
+
TOFDC_split/Test/RGB/20200820_202007_rgb_crop.png,TOFDC_split/Test/GT/20200820_202007_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_202007_depth_crop_fill.png
|
66 |
+
TOFDC_split/Test/RGB/2020_09_08_10_39_34_461_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_10_39_34_461_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_10_39_34_461_rgb_depth_crop_fill.png
|
67 |
+
TOFDC_split/Test/RGB/2020_09_08_11_34_21_487_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_11_34_21_487_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_11_34_21_487_rgb_depth_crop_fill.png
|
68 |
+
TOFDC_split/Test/RGB/20200819_105135_rgb_crop.png,TOFDC_split/Test/GT/20200819_105135_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_105135_depth_crop_fill.png
|
69 |
+
TOFDC_split/Test/RGB/20200601_194309_rgb_crop.png,TOFDC_split/Test/GT/20200601_194309_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_194309_depth_crop_fill.png
|
70 |
+
TOFDC_split/Test/RGB/20200719_145024_rgb_crop.png,TOFDC_split/Test/GT/20200719_145024_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_145024_depth_crop_fill.png
|
71 |
+
TOFDC_split/Test/RGB/20200819_122505_rgb_crop.png,TOFDC_split/Test/GT/20200819_122505_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_122505_depth_crop_fill.png
|
72 |
+
TOFDC_split/Test/RGB/20200818_160711_rgb_crop.png,TOFDC_split/Test/GT/20200818_160711_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_160711_depth_crop_fill.png
|
73 |
+
TOFDC_split/Test/RGB/20200819_170025_rgb_crop.png,TOFDC_split/Test/GT/20200819_170025_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_170025_depth_crop_fill.png
|
74 |
+
TOFDC_split/Test/RGB/20200920_113836_rgb_crop.png,TOFDC_split/Test/GT/20200920_113836_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_113836_depth_crop_fill.png
|
75 |
+
TOFDC_split/Test/RGB/20200721_162455_rgb_crop.png,TOFDC_split/Test/GT/20200721_162455_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_162455_depth_crop_fill.png
|
76 |
+
TOFDC_split/Test/RGB/20200717_162540_rgb_crop.png,TOFDC_split/Test/GT/20200717_162540_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_162540_depth_crop_fill.png
|
77 |
+
TOFDC_split/Test/RGB/20200818_160441_rgb_crop.png,TOFDC_split/Test/GT/20200818_160441_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_160441_depth_crop_fill.png
|
78 |
+
TOFDC_split/Test/RGB/20200824_164044_rgb_crop.png,TOFDC_split/Test/GT/20200824_164044_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_164044_depth_crop_fill.png
|
79 |
+
TOFDC_split/Test/RGB/20200824_161533_rgb_crop.png,TOFDC_split/Test/GT/20200824_161533_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_161533_depth_crop_fill.png
|
80 |
+
TOFDC_split/Test/RGB/20200919_110444_rgb_crop.png,TOFDC_split/Test/GT/20200919_110444_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_110444_depth_crop_fill.png
|
81 |
+
TOFDC_split/Test/RGB/20200723_205806_rgb_crop.png,TOFDC_split/Test/GT/20200723_205806_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200723_205806_depth_crop_fill.png
|
82 |
+
TOFDC_split/Test/RGB/20200718_150137_rgb_crop.png,TOFDC_split/Test/GT/20200718_150137_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_150137_depth_crop_fill.png
|
83 |
+
TOFDC_split/Test/RGB/20200927_202911_rgb_crop.png,TOFDC_split/Test/GT/20200927_202911_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_202911_depth_crop_fill.png
|
84 |
+
TOFDC_split/Test/RGB/20200723_102945_rgb_crop.png,TOFDC_split/Test/GT/20200723_102945_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200723_102945_depth_crop_fill.png
|
85 |
+
TOFDC_split/Test/RGB/20200726_105624_rgb_crop.png,TOFDC_split/Test/GT/20200726_105624_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200726_105624_depth_crop_fill.png
|
86 |
+
TOFDC_split/Test/RGB/2020_09_13_19_31_37_869_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_13_19_31_37_869_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_13_19_31_37_869_rgb_depth_crop_fill.png
|
87 |
+
TOFDC_split/Test/RGB/20200920_170617_rgb_crop.png,TOFDC_split/Test/GT/20200920_170617_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_170617_depth_crop_fill.png
|
88 |
+
TOFDC_split/Test/RGB/20200927_193927_rgb_crop.png,TOFDC_split/Test/GT/20200927_193927_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_193927_depth_crop_fill.png
|
89 |
+
TOFDC_split/Test/RGB/2020_09_08_16_07_41_800_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_16_07_41_800_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_16_07_41_800_rgb_depth_crop_fill.png
|
90 |
+
TOFDC_split/Test/RGB/20200603_131120_rgb_crop.png,TOFDC_split/Test/GT/20200603_131120_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_131120_depth_crop_fill.png
|
91 |
+
TOFDC_split/Test/RGB/20200721_111013_rgb_crop.png,TOFDC_split/Test/GT/20200721_111013_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_111013_depth_crop_fill.png
|
92 |
+
TOFDC_split/Test/RGB/20200601_110530_rgb_crop.png,TOFDC_split/Test/GT/20200601_110530_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_110530_depth_crop_fill.png
|
93 |
+
TOFDC_split/Test/RGB/2020_09_10_22_49_02_519_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_22_49_02_519_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_22_49_02_519_rgb_depth_crop_fill.png
|
94 |
+
TOFDC_split/Test/RGB/20200927_155929_rgb_crop.png,TOFDC_split/Test/GT/20200927_155929_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_155929_depth_crop_fill.png
|
95 |
+
TOFDC_split/Test/RGB/20200820_100501_rgb_crop.png,TOFDC_split/Test/GT/20200820_100501_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_100501_depth_crop_fill.png
|
96 |
+
TOFDC_split/Test/RGB/2020_09_09_11_45_50_523_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_11_45_50_523_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_11_45_50_523_rgb_depth_crop_fill.png
|
97 |
+
TOFDC_split/Test/RGB/2020_09_23_22_33_24_383_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_22_33_24_383_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_22_33_24_383_rgb_depth_crop_fill.png
|
98 |
+
TOFDC_split/Test/RGB/2020_09_13_21_28_52_074_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_13_21_28_52_074_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_13_21_28_52_074_rgb_depth_crop_fill.png
|
99 |
+
TOFDC_split/Test/RGB/2020_09_26_10_32_39_351_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_10_32_39_351_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_10_32_39_351_rgb_depth_crop_fill.png
|
100 |
+
TOFDC_split/Test/RGB/20200927_153446_rgb_crop.png,TOFDC_split/Test/GT/20200927_153446_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_153446_depth_crop_fill.png
|
101 |
+
TOFDC_split/Test/RGB/20200818_213819_rgb_crop.png,TOFDC_split/Test/GT/20200818_213819_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_213819_depth_crop_fill.png
|
102 |
+
TOFDC_split/Test/RGB/2020_09_22_10_05_08_603_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_10_05_08_603_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_10_05_08_603_rgb_depth_crop_fill.png
|
103 |
+
TOFDC_split/Test/RGB/20200919_170833_rgb_crop.png,TOFDC_split/Test/GT/20200919_170833_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_170833_depth_crop_fill.png
|
104 |
+
TOFDC_split/Test/RGB/20200727_165805_rgb_crop.png,TOFDC_split/Test/GT/20200727_165805_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200727_165805_depth_crop_fill.png
|
105 |
+
TOFDC_split/Test/RGB/20200819_145319_rgb_crop.png,TOFDC_split/Test/GT/20200819_145319_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_145319_depth_crop_fill.png
|
106 |
+
TOFDC_split/Test/RGB/20200818_151827_rgb_crop.png,TOFDC_split/Test/GT/20200818_151827_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_151827_depth_crop_fill.png
|
107 |
+
TOFDC_split/Test/RGB/20200819_153248_rgb_crop.png,TOFDC_split/Test/GT/20200819_153248_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_153248_depth_crop_fill.png
|
108 |
+
TOFDC_split/Test/RGB/2020_09_26_14_21_00_533_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_14_21_00_533_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_14_21_00_533_rgb_depth_crop_fill.png
|
109 |
+
TOFDC_split/Test/RGB/20200927_203648_rgb_crop.png,TOFDC_split/Test/GT/20200927_203648_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_203648_depth_crop_fill.png
|
110 |
+
TOFDC_split/Test/RGB/2020_09_13_21_18_21_224_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_13_21_18_21_224_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_13_21_18_21_224_rgb_depth_crop_fill.png
|
111 |
+
TOFDC_split/Test/RGB/20200718_153745_rgb_crop.png,TOFDC_split/Test/GT/20200718_153745_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_153745_depth_crop_fill.png
|
112 |
+
TOFDC_split/Test/RGB/20200818_154707_rgb_crop.png,TOFDC_split/Test/GT/20200818_154707_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_154707_depth_crop_fill.png
|
113 |
+
TOFDC_split/Test/RGB/20200927_142826_rgb_crop.png,TOFDC_split/Test/GT/20200927_142826_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_142826_depth_crop_fill.png
|
114 |
+
TOFDC_split/Test/RGB/20200820_111453_rgb_crop.png,TOFDC_split/Test/GT/20200820_111453_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_111453_depth_crop_fill.png
|
115 |
+
TOFDC_split/Test/RGB/20200601_151641_rgb_crop.png,TOFDC_split/Test/GT/20200601_151641_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_151641_depth_crop_fill.png
|
116 |
+
TOFDC_split/Test/RGB/20200726_103105_rgb_crop.png,TOFDC_split/Test/GT/20200726_103105_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200726_103105_depth_crop_fill.png
|
117 |
+
TOFDC_split/Test/RGB/20200820_200355_rgb_crop.png,TOFDC_split/Test/GT/20200820_200355_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_200355_depth_crop_fill.png
|
118 |
+
TOFDC_split/Test/RGB/20200717_111412_rgb_crop.png,TOFDC_split/Test/GT/20200717_111412_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_111412_depth_crop_fill.png
|
119 |
+
TOFDC_split/Test/RGB/20200726_110251_rgb_crop.png,TOFDC_split/Test/GT/20200726_110251_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200726_110251_depth_crop_fill.png
|
120 |
+
TOFDC_split/Test/RGB/2020_09_10_16_26_44_874_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_16_26_44_874_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_16_26_44_874_rgb_depth_crop_fill.png
|
121 |
+
TOFDC_split/Test/RGB/20200722_201920_rgb_crop.png,TOFDC_split/Test/GT/20200722_201920_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_201920_depth_crop_fill.png
|
122 |
+
TOFDC_split/Test/RGB/2020_09_09_17_02_43_395_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_17_02_43_395_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_17_02_43_395_rgb_depth_crop_fill.png
|
123 |
+
TOFDC_split/Test/RGB/2020_09_13_19_47_04_644_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_13_19_47_04_644_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_13_19_47_04_644_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_11_22_28_946_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_11_22_28_946_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_11_22_28_946_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_21_19_57_19_431_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_19_57_19_431_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_19_57_19_431_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200921_140059_rgb_crop.png,TOFDC_split/Test/GT/20200921_140059_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200921_140059_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_185201_rgb_crop.png,TOFDC_split/Test/GT/20200725_185201_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_185201_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_215051_rgb_crop.png,TOFDC_split/Test/GT/20200818_215051_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_215051_depth_crop_fill.png
TOFDC_split/Test/RGB/20200831_104743_rgb_crop.png,TOFDC_split/Test/GT/20200831_104743_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200831_104743_depth_crop_fill.png
TOFDC_split/Test/RGB/20200607_170307_rgb_crop.png,TOFDC_split/Test/GT/20200607_170307_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200607_170307_depth_crop_fill.png
TOFDC_split/Test/RGB/20200604_150753_rgb_crop.png,TOFDC_split/Test/GT/20200604_150753_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200604_150753_depth_crop_fill.png
TOFDC_split/Test/RGB/20200720_151834_rgb_crop.png,TOFDC_split/Test/GT/20200720_151834_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200720_151834_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_121442_rgb_crop.png,TOFDC_split/Test/GT/20200818_121442_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_121442_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_160055_rgb_crop.png,TOFDC_split/Test/GT/20200725_160055_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_160055_depth_crop_fill.png
TOFDC_split/Test/RGB/20200601_095208_rgb_crop.png,TOFDC_split/Test/GT/20200601_095208_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_095208_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_152246_rgb_crop.png,TOFDC_split/Test/GT/20200920_152246_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_152246_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_134410_rgb_crop.png,TOFDC_split/Test/GT/20200927_134410_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_134410_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_122215_rgb_crop.png,TOFDC_split/Test/GT/20200819_122215_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_122215_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_151316_rgb_crop.png,TOFDC_split/Test/GT/20200824_151316_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_151316_depth_crop_fill.png
TOFDC_split/Test/RGB/20200921_142741_rgb_crop.png,TOFDC_split/Test/GT/20200921_142741_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200921_142741_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_190514_rgb_crop.png,TOFDC_split/Test/GT/20200920_190514_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_190514_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_150602_rgb_crop.png,TOFDC_split/Test/GT/20200818_150602_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_150602_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_155110_rgb_crop.png,TOFDC_split/Test/GT/20200927_155110_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_155110_depth_crop_fill.png
TOFDC_split/Test/RGB/20200928_151401_rgb_crop.png,TOFDC_split/Test/GT/20200928_151401_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_151401_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_172019_rgb_crop.png,TOFDC_split/Test/GT/20200920_172019_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_172019_depth_crop_fill.png
TOFDC_split/Test/RGB/20200602_154724_rgb_crop.png,TOFDC_split/Test/GT/20200602_154724_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200602_154724_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_171017_rgb_crop.png,TOFDC_split/Test/GT/20200824_171017_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_171017_depth_crop_fill.png
TOFDC_split/Test/RGB/20200820_103917_rgb_crop.png,TOFDC_split/Test/GT/20200820_103917_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_103917_depth_crop_fill.png
TOFDC_split/Test/RGB/20200531_170527_rgb_crop.png,TOFDC_split/Test/GT/20200531_170527_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200531_170527_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_11_35_41_090_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_11_35_41_090_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_11_35_41_090_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_22_09_17_091_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_22_09_17_091_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_22_09_17_091_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_21_21_12_25_296_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_21_12_25_296_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_21_12_25_296_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_20_50_10_664_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_20_50_10_664_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_20_50_10_664_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_09_57_03_131_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_09_57_03_131_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_09_57_03_131_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_144858_rgb_crop.png,TOFDC_split/Test/GT/20200818_144858_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_144858_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_184628_rgb_crop.png,TOFDC_split/Test/GT/20200819_184628_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_184628_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_195247_rgb_crop.png,TOFDC_split/Test/GT/20200919_195247_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_195247_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_11_54_40_367_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_11_54_40_367_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_11_54_40_367_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_10_35_16_382_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_10_35_16_382_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_10_35_16_382_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_26_11_23_20_457_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_11_23_20_457_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_11_23_20_457_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_11_42_17_374_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_11_42_17_374_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_11_42_17_374_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_26_15_17_14_600_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_15_17_14_600_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_15_17_14_600_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_21_53_39_630_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_21_53_39_630_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_21_53_39_630_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200717_194846_rgb_crop.png,TOFDC_split/Test/GT/20200717_194846_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_194846_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_094258_rgb_crop.png,TOFDC_split/Test/GT/20200919_094258_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_094258_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_180228_rgb_crop.png,TOFDC_split/Test/GT/20200819_180228_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_180228_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_152749_rgb_crop.png,TOFDC_split/Test/GT/20200920_152749_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_152749_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_163942_rgb_crop.png,TOFDC_split/Test/GT/20200725_163942_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_163942_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_164908_rgb_crop.png,TOFDC_split/Test/GT/20200920_164908_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_164908_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_204159_rgb_crop.png,TOFDC_split/Test/GT/20200819_204159_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_204159_depth_crop_fill.png
TOFDC_split/Test/RGB/20200928_150743_rgb_crop.png,TOFDC_split/Test/GT/20200928_150743_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_150743_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_094624_rgb_crop.png,TOFDC_split/Test/GT/20200725_094624_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_094624_depth_crop_fill.png
TOFDC_split/Test/RGB/20200606_155808_rgb_crop.png,TOFDC_split/Test/GT/20200606_155808_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200606_155808_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_153237_rgb_crop.png,TOFDC_split/Test/GT/20200927_153237_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_153237_depth_crop_fill.png
TOFDC_split/Test/RGB/20200717_160548_rgb_crop.png,TOFDC_split/Test/GT/20200717_160548_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_160548_depth_crop_fill.png
TOFDC_split/Test/RGB/20200928_151544_rgb_crop.png,TOFDC_split/Test/GT/20200928_151544_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_151544_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_181407_rgb_crop.png,TOFDC_split/Test/GT/20200819_181407_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_181407_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_163933_rgb_crop.png,TOFDC_split/Test/GT/20200824_163933_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_163933_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_202647_rgb_crop.png,TOFDC_split/Test/GT/20200819_202647_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_202647_depth_crop_fill.png
TOFDC_split/Test/RGB/20200719_145453_rgb_crop.png,TOFDC_split/Test/GT/20200719_145453_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_145453_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_21_21_00_09_208_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_21_00_09_208_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_21_00_09_208_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_145821_rgb_crop.png,TOFDC_split/Test/GT/20200819_145821_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_145821_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_22_04_04_209_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_22_04_04_209_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_22_04_04_209_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_213907_rgb_crop.png,TOFDC_split/Test/GT/20200818_213907_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_213907_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_134535_rgb_crop.png,TOFDC_split/Test/GT/20200819_134535_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_134535_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_203952_rgb_crop.png,TOFDC_split/Test/GT/20200819_203952_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_203952_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_140000_rgb_crop.png,TOFDC_split/Test/GT/20200927_140000_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_140000_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_11_40_52_856_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_11_40_52_856_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_11_40_52_856_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_17_08_24_083_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_17_08_24_083_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_17_08_24_083_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_201011_rgb_crop.png,TOFDC_split/Test/GT/20200927_201011_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_201011_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_21_34_27_739_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_21_34_27_739_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_21_34_27_739_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_14_23_36_46_156_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_14_23_36_46_156_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_14_23_36_46_156_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200721_200407_rgb_crop.png,TOFDC_split/Test/GT/20200721_200407_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_200407_depth_crop_fill.png
TOFDC_split/Test/RGB/20200717_110213_rgb_crop.png,TOFDC_split/Test/GT/20200717_110213_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_110213_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_170202_rgb_crop.png,TOFDC_split/Test/GT/20200920_170202_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_170202_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_13_19_05_26_941_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_13_19_05_26_941_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_13_19_05_26_941_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200722_095703_rgb_crop.png,TOFDC_split/Test/GT/20200722_095703_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_095703_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_134306_rgb_crop.png,TOFDC_split/Test/GT/20200819_134306_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_134306_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_11_25_02_700_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_11_25_02_700_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_11_25_02_700_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_153720_rgb_crop.png,TOFDC_split/Test/GT/20200818_153720_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_153720_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_104804_rgb_crop.png,TOFDC_split/Test/GT/20200819_104804_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_104804_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_203912_rgb_crop.png,TOFDC_split/Test/GT/20200927_203912_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_203912_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_11_31_09_140_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_11_31_09_140_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_11_31_09_140_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_151128_rgb_crop.png,TOFDC_split/Test/GT/20200818_151128_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_151128_depth_crop_fill.png
TOFDC_split/Test/RGB/20200820_144718_rgb_crop.png,TOFDC_split/Test/GT/20200820_144718_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_144718_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_154626_rgb_crop.png,TOFDC_split/Test/GT/20200920_154626_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_154626_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_203237_rgb_crop.png,TOFDC_split/Test/GT/20200927_203237_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_203237_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_123732_rgb_crop.png,TOFDC_split/Test/GT/20200818_123732_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_123732_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_141218_rgb_crop.png,TOFDC_split/Test/GT/20200927_141218_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_141218_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_140327_rgb_crop.png,TOFDC_split/Test/GT/20200927_140327_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_140327_depth_crop_fill.png
TOFDC_split/Test/RGB/20200531_165554_rgb_crop.png,TOFDC_split/Test/GT/20200531_165554_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200531_165554_depth_crop_fill.png
TOFDC_split/Test/RGB/20200921_143357_rgb_crop.png,TOFDC_split/Test/GT/20200921_143357_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200921_143357_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_212139_rgb_crop.png,TOFDC_split/Test/GT/20200818_212139_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_212139_depth_crop_fill.png
TOFDC_split/Test/RGB/20200903_173732_rgb_crop.png,TOFDC_split/Test/GT/20200903_173732_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200903_173732_depth_crop_fill.png
TOFDC_split/Test/RGB/20200723_175217_rgb_crop.png,TOFDC_split/Test/GT/20200723_175217_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200723_175217_depth_crop_fill.png
TOFDC_split/Test/RGB/20200903_173336_rgb_crop.png,TOFDC_split/Test/GT/20200903_173336_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200903_173336_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_121346_rgb_crop.png,TOFDC_split/Test/GT/20200819_121346_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_121346_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_175053_rgb_crop.png,TOFDC_split/Test/GT/20200824_175053_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_175053_depth_crop_fill.png
TOFDC_split/Test/RGB/20200603_205658_rgb_crop.png,TOFDC_split/Test/GT/20200603_205658_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_205658_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_16_52_41_055_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_16_52_41_055_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_16_52_41_055_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_10_23_50_598_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_10_23_50_598_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_10_23_50_598_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_21_26_23_881_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_21_26_23_881_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_21_26_23_881_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_14_08_15_244_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_14_08_15_244_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_14_08_15_244_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_23_21_47_53_262_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_21_47_53_262_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_21_47_53_262_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_26_11_11_14_488_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_11_11_14_488_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_11_11_14_488_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_17_52_29_102_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_17_52_29_102_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_17_52_29_102_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_17_39_08_025_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_17_39_08_025_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_17_39_08_025_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_14_23_21_39_930_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_14_23_21_39_930_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_14_23_21_39_930_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_112301_rgb_crop.png,TOFDC_split/Test/GT/20200920_112301_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_112301_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_094704_rgb_crop.png,TOFDC_split/Test/GT/20200919_094704_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_094704_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_21_23_54_045_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_21_23_54_045_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_21_23_54_045_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_10_49_10_074_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_10_49_10_074_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_10_49_10_074_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_11_37_50_280_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_11_37_50_280_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_11_37_50_280_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_165543_rgb_crop.png,TOFDC_split/Test/GT/20200919_165543_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_165543_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_22_23_15_039_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_22_23_15_039_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_22_23_15_039_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_11_48_08_203_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_11_48_08_203_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_11_48_08_203_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_26_11_31_05_109_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_11_31_05_109_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_11_31_05_109_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_06_21_26_30_171_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_06_21_26_30_171_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_06_21_26_30_171_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200717_095653_rgb_crop.png,TOFDC_split/Test/GT/20200717_095653_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_095653_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_16_01_03_086_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_16_01_03_086_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_16_01_03_086_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_152619_rgb_crop.png,TOFDC_split/Test/GT/20200919_152619_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_152619_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_143926_rgb_crop.png,TOFDC_split/Test/GT/20200927_143926_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_143926_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_193552_rgb_crop.png,TOFDC_split/Test/GT/20200927_193552_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_193552_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_175048_rgb_crop.png,TOFDC_split/Test/GT/20200819_175048_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_175048_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_11_19_33_244_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_11_19_33_244_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_11_19_33_244_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200727_172827_rgb_crop.png,TOFDC_split/Test/GT/20200727_172827_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200727_172827_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_15_02_15_336_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_15_02_15_336_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_15_02_15_336_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_175543_rgb_crop.png,TOFDC_split/Test/GT/20200819_175543_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_175543_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_21_20_50_54_019_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_20_50_54_019_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_20_50_54_019_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_22_11_37_156_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_22_11_37_156_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_22_11_37_156_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_163645_rgb_crop.png,TOFDC_split/Test/GT/20200819_163645_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_163645_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_14_58_41_346_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_14_58_41_346_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_14_58_41_346_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_100421_rgb_crop.png,TOFDC_split/Test/GT/20200919_100421_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_100421_depth_crop_fill.png
TOFDC_split/Test/RGB/20200720_100836_rgb_crop.png,TOFDC_split/Test/GT/20200720_100836_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200720_100836_depth_crop_fill.png
TOFDC_split/Test/RGB/20200603_125406_rgb_crop.png,TOFDC_split/Test/GT/20200603_125406_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_125406_depth_crop_fill.png
TOFDC_split/Test/RGB/20200605_100852_rgb_crop.png,TOFDC_split/Test/GT/20200605_100852_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200605_100852_depth_crop_fill.png
TOFDC_split/Test/RGB/20200831_103209_rgb_crop.png,TOFDC_split/Test/GT/20200831_103209_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200831_103209_depth_crop_fill.png
TOFDC_split/Test/RGB/20200820_161810_rgb_crop.png,TOFDC_split/Test/GT/20200820_161810_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_161810_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_16_41_42_774_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_16_41_42_774_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_16_41_42_774_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200601_141248_rgb_crop.png,TOFDC_split/Test/GT/20200601_141248_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_141248_depth_crop_fill.png
TOFDC_split/Test/RGB/20200820_114434_rgb_crop.png,TOFDC_split/Test/GT/20200820_114434_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_114434_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_23_22_39_23_658_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_22_39_23_658_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_22_39_23_658_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200820_160235_rgb_crop.png,TOFDC_split/Test/GT/20200820_160235_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_160235_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_104030_rgb_crop.png,TOFDC_split/Test/GT/20200919_104030_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_104030_depth_crop_fill.png
TOFDC_split/Test/RGB/20200604_155501_rgb_crop.png,TOFDC_split/Test/GT/20200604_155501_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200604_155501_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_214102_rgb_crop.png,TOFDC_split/Test/GT/20200818_214102_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_214102_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_161320_rgb_crop.png,TOFDC_split/Test/GT/20200725_161320_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_161320_depth_crop_fill.png
TOFDC_split/Test/RGB/20200903_155040_rgb_crop.png,TOFDC_split/Test/GT/20200903_155040_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200903_155040_depth_crop_fill.png
TOFDC_split/Test/RGB/20200717_104214_rgb_crop.png,TOFDC_split/Test/GT/20200717_104214_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_104214_depth_crop_fill.png
TOFDC_split/Test/RGB/20200820_142910_rgb_crop.png,TOFDC_split/Test/GT/20200820_142910_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_142910_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_160150_rgb_crop.png,TOFDC_split/Test/GT/20200725_160150_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_160150_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_152521_rgb_crop.png,TOFDC_split/Test/GT/20200824_152521_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_152521_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_200541_rgb_crop.png,TOFDC_split/Test/GT/20200819_200541_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_200541_depth_crop_fill.png
TOFDC_split/Test/RGB/20200722_143443_rgb_crop.png,TOFDC_split/Test/GT/20200722_143443_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_143443_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_160230_rgb_crop.png,TOFDC_split/Test/GT/20200819_160230_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_160230_depth_crop_fill.png
TOFDC_split/Test/RGB/20200604_161732_rgb_crop.png,TOFDC_split/Test/GT/20200604_161732_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200604_161732_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_23_22_47_07_966_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_22_47_07_966_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_22_47_07_966_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200608_084013_rgb_crop.png,TOFDC_split/Test/GT/20200608_084013_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200608_084013_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_104950_rgb_crop.png,TOFDC_split/Test/GT/20200920_104950_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_104950_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_203124_rgb_crop.png,TOFDC_split/Test/GT/20200927_203124_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_203124_depth_crop_fill.png
TOFDC_split/Test/RGB/20200717_201642_rgb_crop.png,TOFDC_split/Test/GT/20200717_201642_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_201642_depth_crop_fill.png
TOFDC_split/Test/RGB/20200719_103252_rgb_crop.png,TOFDC_split/Test/GT/20200719_103252_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_103252_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_191008_rgb_crop.png,TOFDC_split/Test/GT/20200920_191008_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_191008_depth_crop_fill.png
TOFDC_split/Test/RGB/20200718_201047_rgb_crop.png,TOFDC_split/Test/GT/20200718_201047_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_201047_depth_crop_fill.png
TOFDC_split/Test/RGB/20200722_155035_rgb_crop.png,TOFDC_split/Test/GT/20200722_155035_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_155035_depth_crop_fill.png
TOFDC_split/Test/RGB/20200603_122349_rgb_crop.png,TOFDC_split/Test/GT/20200603_122349_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_122349_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_154114_rgb_crop.png,TOFDC_split/Test/GT/20200824_154114_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_154114_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_160612_rgb_crop.png,TOFDC_split/Test/GT/20200725_160612_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_160612_depth_crop_fill.png
TOFDC_split/Test/RGB/20200606_162252_rgb_crop.png,TOFDC_split/Test/GT/20200606_162252_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200606_162252_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_151138_rgb_crop.png,TOFDC_split/Test/GT/20200920_151138_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_151138_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_163501_rgb_crop.png,TOFDC_split/Test/GT/20200725_163501_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_163501_depth_crop_fill.png
TOFDC_split/Test/RGB/20200531_171903_rgb_crop.png,TOFDC_split/Test/GT/20200531_171903_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200531_171903_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_104725_rgb_crop.png,TOFDC_split/Test/GT/20200920_104725_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_104725_depth_crop_fill.png
TOFDC_split/Test/RGB/20200820_104816_rgb_crop.png,TOFDC_split/Test/GT/20200820_104816_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_104816_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_182001_rgb_crop.png,TOFDC_split/Test/GT/20200819_182001_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_182001_depth_crop_fill.png
TOFDC_split/Test/RGB/20200718_100742_rgb_crop.png,TOFDC_split/Test/GT/20200718_100742_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_100742_depth_crop_fill.png
TOFDC_split/Test/RGB/20200602_162319_rgb_crop.png,TOFDC_split/Test/GT/20200602_162319_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200602_162319_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_134848_rgb_crop.png,TOFDC_split/Test/GT/20200927_134848_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_134848_depth_crop_fill.png
TOFDC_split/Test/RGB/20200726_095539_rgb_crop.png,TOFDC_split/Test/GT/20200726_095539_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200726_095539_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_21_50_06_664_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_21_50_06_664_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_21_50_06_664_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_183727_rgb_crop.png,TOFDC_split/Test/GT/20200819_183727_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_183727_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_23_21_34_14_417_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_21_34_14_417_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_21_34_14_417_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_16_04_24_266_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_16_04_24_266_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_16_04_24_266_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_10_48_18_225_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_10_48_18_225_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_10_48_18_225_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_16_36_21_153_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_16_36_21_153_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_16_36_21_153_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_163855_rgb_crop.png,TOFDC_split/Test/GT/20200819_163855_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_163855_depth_crop_fill.png
TOFDC_split/Test/RGB/20200928_153558_rgb_crop.png,TOFDC_split/Test/GT/20200928_153558_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_153558_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_16_17_50_551_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_16_17_50_551_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_16_17_50_551_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200727_165514_rgb_crop.png,TOFDC_split/Test/GT/20200727_165514_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200727_165514_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_150942_rgb_crop.png,TOFDC_split/Test/GT/20200818_150942_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_150942_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_22_22_02_175_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_22_22_02_175_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_22_22_02_175_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_153344_rgb_crop.png,TOFDC_split/Test/GT/20200819_153344_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_153344_depth_crop_fill.png
TOFDC_split/Test/RGB/20200903_175058_rgb_crop.png,TOFDC_split/Test/GT/20200903_175058_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200903_175058_depth_crop_fill.png
TOFDC_split/Test/RGB/20200720_195905_rgb_crop.png,TOFDC_split/Test/GT/20200720_195905_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200720_195905_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_130736_rgb_crop.png,TOFDC_split/Test/GT/20200819_130736_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_130736_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_102349_rgb_crop.png,TOFDC_split/Test/GT/20200919_102349_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_102349_depth_crop_fill.png
TOFDC_split/Test/RGB/20200718_153607_rgb_crop.png,TOFDC_split/Test/GT/20200718_153607_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_153607_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_161039_rgb_crop.png,TOFDC_split/Test/GT/20200927_161039_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_161039_depth_crop_fill.png
TOFDC_split/Test/RGB/20200725_164346_rgb_crop.png,TOFDC_split/Test/GT/20200725_164346_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_164346_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_172225_rgb_crop.png,TOFDC_split/Test/GT/20200919_172225_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_172225_depth_crop_fill.png
TOFDC_split/Test/RGB/20200603_094538_rgb_crop.png,TOFDC_split/Test/GT/20200603_094538_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_094538_depth_crop_fill.png
TOFDC_split/Test/RGB/20200531_164918_rgb_crop.png,TOFDC_split/Test/GT/20200531_164918_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200531_164918_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_154515_rgb_crop.png,TOFDC_split/Test/GT/20200818_154515_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_154515_depth_crop_fill.png
TOFDC_split/Test/RGB/20200726_101759_rgb_crop.png,TOFDC_split/Test/GT/20200726_101759_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200726_101759_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_15_50_03_213_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_15_50_03_213_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_15_50_03_213_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_200819_rgb_crop.png,TOFDC_split/Test/GT/20200819_200819_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_200819_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_194033_rgb_crop.png,TOFDC_split/Test/GT/20200927_194033_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_194033_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_164646_rgb_crop.png,TOFDC_split/Test/GT/20200920_164646_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_164646_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_141806_rgb_crop.png,TOFDC_split/Test/GT/20200927_141806_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_141806_depth_crop_fill.png
TOFDC_split/Test/RGB/20200726_095225_rgb_crop.png,TOFDC_split/Test/GT/20200726_095225_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200726_095225_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_173922_rgb_crop.png,TOFDC_split/Test/GT/20200824_173922_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_173922_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_170457_rgb_crop.png,TOFDC_split/Test/GT/20200824_170457_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_170457_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_23_22_14_40_589_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_22_14_40_589_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_22_14_40_589_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_23_22_09_59_260_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_22_09_59_260_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_22_09_59_260_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_14_51_15_488_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_14_51_15_488_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_14_51_15_488_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200723_202303_rgb_crop.png,TOFDC_split/Test/GT/20200723_202303_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200723_202303_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_26_15_10_41_668_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_15_10_41_668_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_15_10_41_668_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_17_16_13_642_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_17_16_13_642_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_17_16_13_642_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200721_165000_rgb_crop.png,TOFDC_split/Test/GT/20200721_165000_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_165000_depth_crop_fill.png
TOFDC_split/Test/RGB/20200601_161016_rgb_crop.png,TOFDC_split/Test/GT/20200601_161016_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_161016_depth_crop_fill.png
TOFDC_split/Test/RGB/20200921_143002_rgb_crop.png,TOFDC_split/Test/GT/20200921_143002_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200921_143002_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_111302_rgb_crop.png,TOFDC_split/Test/GT/20200824_111302_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_111302_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_21_43_45_695_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_21_43_45_695_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_21_43_45_695_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_11_15_44_885_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_11_15_44_885_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_11_15_44_885_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_112430_rgb_crop.png,TOFDC_split/Test/GT/20200920_112430_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_112430_depth_crop_fill.png
TOFDC_split/Test/RGB/20200720_201235_rgb_crop.png,TOFDC_split/Test/GT/20200720_201235_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200720_201235_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_17_11_38_064_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_17_11_38_064_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_17_11_38_064_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_14_46_09_471_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_14_46_09_471_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_14_46_09_471_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_15_09_33_394_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_15_09_33_394_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_15_09_33_394_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200820_095802_rgb_crop.png,TOFDC_split/Test/GT/20200820_095802_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_095802_depth_crop_fill.png
TOFDC_split/Test/RGB/20200719_203741_rgb_crop.png,TOFDC_split/Test/GT/20200719_203741_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_203741_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_202732_rgb_crop.png,TOFDC_split/Test/GT/20200927_202732_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_202732_depth_crop_fill.png
TOFDC_split/Test/RGB/20200722_194018_rgb_crop.png,TOFDC_split/Test/GT/20200722_194018_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_194018_depth_crop_fill.png
TOFDC_split/Test/RGB/20200604_145419_rgb_crop.png,TOFDC_split/Test/GT/20200604_145419_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200604_145419_depth_crop_fill.png
TOFDC_split/Test/RGB/20200607_102459_rgb_crop.png,TOFDC_split/Test/GT/20200607_102459_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200607_102459_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_100905_rgb_crop.png,TOFDC_split/Test/GT/20200919_100905_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_100905_depth_crop_fill.png
TOFDC_split/Test/RGB/20200717_193033_rgb_crop.png,TOFDC_split/Test/GT/20200717_193033_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_193033_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_122903_rgb_crop.png,TOFDC_split/Test/GT/20200818_122903_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_122903_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_165134_rgb_crop.png,TOFDC_split/Test/GT/20200824_165134_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_165134_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_103715_rgb_crop.png,TOFDC_split/Test/GT/20200818_103715_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_103715_depth_crop_fill.png
TOFDC_split/Test/RGB/20200605_094604_rgb_crop.png,TOFDC_split/Test/GT/20200605_094604_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200605_094604_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_13_59_59_435_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_13_59_59_435_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_13_59_59_435_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200920_153941_rgb_crop.png,TOFDC_split/Test/GT/20200920_153941_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_153941_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_162137_rgb_crop.png,TOFDC_split/Test/GT/20200824_162137_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_162137_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_160450_rgb_crop.png,TOFDC_split/Test/GT/20200927_160450_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_160450_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_201454_rgb_crop.png,TOFDC_split/Test/GT/20200927_201454_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_201454_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_140533_rgb_crop.png,TOFDC_split/Test/GT/20200927_140533_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_140533_depth_crop_fill.png
TOFDC_split/Test/RGB/20200608_092324_rgb_crop.png,TOFDC_split/Test/GT/20200608_092324_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200608_092324_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_10_00_00_06_823_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_00_00_06_823_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_00_00_06_823_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200727_170712_rgb_crop.png,TOFDC_split/Test/GT/20200727_170712_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200727_170712_depth_crop_fill.png
TOFDC_split/Test/RGB/20200921_141709_rgb_crop.png,TOFDC_split/Test/GT/20200921_141709_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200921_141709_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_131119_rgb_crop.png,TOFDC_split/Test/GT/20200819_131119_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_131119_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_154613_rgb_crop.png,TOFDC_split/Test/GT/20200824_154613_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_154613_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_120653_rgb_crop.png,TOFDC_split/Test/GT/20200818_120653_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_120653_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_21_11_12_504_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_21_11_12_504_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_21_11_12_504_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200721_193506_rgb_crop.png,TOFDC_split/Test/GT/20200721_193506_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_193506_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_141406_rgb_crop.png,TOFDC_split/Test/GT/20200927_141406_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_141406_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_152521_rgb_crop.png,TOFDC_split/Test/GT/20200819_152521_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_152521_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_21_15_12_678_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_21_15_12_678_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_21_15_12_678_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_16_31_44_521_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_16_31_44_521_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_16_31_44_521_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_09_16_54_11_702_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_16_54_11_702_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_16_54_11_702_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_26_10_14_23_962_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_10_14_23_962_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_10_14_23_962_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_195755_rgb_crop.png,TOFDC_split/Test/GT/20200927_195755_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_195755_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_18_41_09_560_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_18_41_09_560_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_18_41_09_560_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_16_03_39_173_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_16_03_39_173_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_16_03_39_173_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_11_28_32_209_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_11_28_32_209_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_11_28_32_209_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_195656_rgb_crop.png,TOFDC_split/Test/GT/20200819_195656_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_195656_depth_crop_fill.png
TOFDC_split/Test/RGB/20200602_170101_rgb_crop.png,TOFDC_split/Test/GT/20200602_170101_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200602_170101_depth_crop_fill.png
TOFDC_split/Test/RGB/20200831_110112_rgb_crop.png,TOFDC_split/Test/GT/20200831_110112_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200831_110112_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_192838_rgb_crop.png,TOFDC_split/Test/GT/20200919_192838_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_192838_depth_crop_fill.png
TOFDC_split/Test/RGB/20200601_102116_rgb_crop.png,TOFDC_split/Test/GT/20200601_102116_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_102116_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_11_14_07_770_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_11_14_07_770_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_11_14_07_770_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_153159_rgb_crop.png,TOFDC_split/Test/GT/20200927_153159_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_153159_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_191616_rgb_crop.png,TOFDC_split/Test/GT/20200927_191616_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_191616_depth_crop_fill.png
TOFDC_split/Test/RGB/20200716_152956_rgb_crop.png,TOFDC_split/Test/GT/20200716_152956_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200716_152956_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_175438_rgb_crop.png,TOFDC_split/Test/GT/20200824_175438_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_175438_depth_crop_fill.png
TOFDC_split/Test/RGB/20200603_085938_rgb_crop.png,TOFDC_split/Test/GT/20200603_085938_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_085938_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_195432_rgb_crop.png,TOFDC_split/Test/GT/20200927_195432_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_195432_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_194324_rgb_crop.png,TOFDC_split/Test/GT/20200919_194324_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_194324_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_12_20_13_55_135_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_20_13_55_135_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_20_13_55_135_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_154328_rgb_crop.png,TOFDC_split/Test/GT/20200919_154328_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_154328_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_26_11_04_10_535_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_11_04_10_535_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_11_04_10_535_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200928_152852_rgb_crop.png,TOFDC_split/Test/GT/20200928_152852_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_152852_depth_crop_fill.png
TOFDC_split/Test/RGB/20200824_170929_rgb_crop.png,TOFDC_split/Test/GT/20200824_170929_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_170929_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_154347_rgb_crop.png,TOFDC_split/Test/GT/20200927_154347_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_154347_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_150838_rgb_crop.png,TOFDC_split/Test/GT/20200818_150838_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_150838_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_204558_rgb_crop.png,TOFDC_split/Test/GT/20200819_204558_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_204558_depth_crop_fill.png
TOFDC_split/Test/RGB/20200818_152458_rgb_crop.png,TOFDC_split/Test/GT/20200818_152458_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_152458_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_155427_rgb_crop.png,TOFDC_split/Test/GT/20200927_155427_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_155427_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_23_21_11_41_067_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_21_11_41_067_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_21_11_41_067_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200719_193804_rgb_crop.png,TOFDC_split/Test/GT/20200719_193804_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_193804_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_194253_rgb_crop.png,TOFDC_split/Test/GT/20200927_194253_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_194253_depth_crop_fill.png
TOFDC_split/Test/RGB/20200722_193954_rgb_crop.png,TOFDC_split/Test/GT/20200722_193954_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200722_193954_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_134952_rgb_crop.png,TOFDC_split/Test/GT/20200927_134952_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_134952_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_160719_rgb_crop.png,TOFDC_split/Test/GT/20200927_160719_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_160719_depth_crop_fill.png
TOFDC_split/Test/RGB/20200607_091021_rgb_crop.png,TOFDC_split/Test/GT/20200607_091021_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200607_091021_depth_crop_fill.png
TOFDC_split/Test/RGB/20200603_103139_rgb_crop.png,TOFDC_split/Test/GT/20200603_103139_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_103139_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_13_19_58_14_264_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_13_19_58_14_264_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_13_19_58_14_264_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200605_145212_rgb_crop.png,TOFDC_split/Test/GT/20200605_145212_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200605_145212_depth_crop_fill.png
TOFDC_split/Test/RGB/20200601_203824_rgb_crop.png,TOFDC_split/Test/GT/20200601_203824_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_203824_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_22_10_47_09_469_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_10_47_09_469_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_10_47_09_469_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_11_18_37_38_345_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_18_37_38_345_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_18_37_38_345_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_16_17_32_475_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_16_17_32_475_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_16_17_32_475_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_200510_rgb_crop.png,TOFDC_split/Test/GT/20200927_200510_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_200510_depth_crop_fill.png
TOFDC_split/Test/RGB/2020_09_08_18_12_17_961_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_18_12_17_961_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_18_12_17_961_rgb_depth_crop_fill.png
TOFDC_split/Test/RGB/20200602_174024_rgb_crop.png,TOFDC_split/Test/GT/20200602_174024_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200602_174024_depth_crop_fill.png
TOFDC_split/Test/RGB/20200927_202013_rgb_crop.png,TOFDC_split/Test/GT/20200927_202013_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_202013_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_160331_rgb_crop.png,TOFDC_split/Test/GT/20200819_160331_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_160331_depth_crop_fill.png
TOFDC_split/Test/RGB/20200919_102820_rgb_crop.png,TOFDC_split/Test/GT/20200919_102820_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_102820_depth_crop_fill.png
TOFDC_split/Test/RGB/20200819_202145_rgb_crop.png,TOFDC_split/Test/GT/20200819_202145_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_202145_depth_crop_fill.png
TOFDC_split/Test/RGB/20200723_150644_rgb_crop.png,TOFDC_split/Test/GT/20200723_150644_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200723_150644_depth_crop_fill.png
|
432 |
+
TOFDC_split/Test/RGB/20200818_153114_rgb_crop.png,TOFDC_split/Test/GT/20200818_153114_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_153114_depth_crop_fill.png
|
433 |
+
TOFDC_split/Test/RGB/20200920_171921_rgb_crop.png,TOFDC_split/Test/GT/20200920_171921_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_171921_depth_crop_fill.png
|
434 |
+
TOFDC_split/Test/RGB/20200824_113913_rgb_crop.png,TOFDC_split/Test/GT/20200824_113913_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_113913_depth_crop_fill.png
|
435 |
+
TOFDC_split/Test/RGB/20200818_104721_rgb_crop.png,TOFDC_split/Test/GT/20200818_104721_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_104721_depth_crop_fill.png
|
436 |
+
TOFDC_split/Test/RGB/20200819_103046_rgb_crop.png,TOFDC_split/Test/GT/20200819_103046_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_103046_depth_crop_fill.png
|
437 |
+
TOFDC_split/Test/RGB/2020_09_08_16_03_28_589_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_16_03_28_589_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_16_03_28_589_rgb_depth_crop_fill.png
|
438 |
+
TOFDC_split/Test/RGB/20200726_105128_rgb_crop.png,TOFDC_split/Test/GT/20200726_105128_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200726_105128_depth_crop_fill.png
|
439 |
+
TOFDC_split/Test/RGB/20200928_151105_rgb_crop.png,TOFDC_split/Test/GT/20200928_151105_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_151105_depth_crop_fill.png
|
440 |
+
TOFDC_split/Test/RGB/20200719_204036_rgb_crop.png,TOFDC_split/Test/GT/20200719_204036_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_204036_depth_crop_fill.png
|
441 |
+
TOFDC_split/Test/RGB/20200824_143415_rgb_crop.png,TOFDC_split/Test/GT/20200824_143415_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_143415_depth_crop_fill.png
|
442 |
+
TOFDC_split/Test/RGB/20200824_174420_rgb_crop.png,TOFDC_split/Test/GT/20200824_174420_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_174420_depth_crop_fill.png
|
443 |
+
TOFDC_split/Test/RGB/20200820_145156_rgb_crop.png,TOFDC_split/Test/GT/20200820_145156_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_145156_depth_crop_fill.png
|
444 |
+
TOFDC_split/Test/RGB/2020_09_11_18_54_26_868_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_18_54_26_868_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_18_54_26_868_rgb_depth_crop_fill.png
|
445 |
+
TOFDC_split/Test/RGB/20200818_153411_rgb_crop.png,TOFDC_split/Test/GT/20200818_153411_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_153411_depth_crop_fill.png
|
446 |
+
TOFDC_split/Test/RGB/20200919_104611_rgb_crop.png,TOFDC_split/Test/GT/20200919_104611_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_104611_depth_crop_fill.png
|
447 |
+
TOFDC_split/Test/RGB/20200602_091635_rgb_crop.png,TOFDC_split/Test/GT/20200602_091635_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200602_091635_depth_crop_fill.png
|
448 |
+
TOFDC_split/Test/RGB/20200927_201911_rgb_crop.png,TOFDC_split/Test/GT/20200927_201911_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_201911_depth_crop_fill.png
|
449 |
+
TOFDC_split/Test/RGB/20200921_141824_rgb_crop.png,TOFDC_split/Test/GT/20200921_141824_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200921_141824_depth_crop_fill.png
|
450 |
+
TOFDC_split/Test/RGB/20200927_143240_rgb_crop.png,TOFDC_split/Test/GT/20200927_143240_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_143240_depth_crop_fill.png
|
451 |
+
TOFDC_split/Test/RGB/20200819_161411_rgb_crop.png,TOFDC_split/Test/GT/20200819_161411_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_161411_depth_crop_fill.png
|
452 |
+
TOFDC_split/Test/RGB/20200725_185103_rgb_crop.png,TOFDC_split/Test/GT/20200725_185103_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_185103_depth_crop_fill.png
|
453 |
+
TOFDC_split/Test/RGB/20200718_160045_rgb_crop.png,TOFDC_split/Test/GT/20200718_160045_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_160045_depth_crop_fill.png
|
454 |
+
TOFDC_split/Test/RGB/20200920_155235_rgb_crop.png,TOFDC_split/Test/GT/20200920_155235_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_155235_depth_crop_fill.png
|
455 |
+
TOFDC_split/Test/RGB/20200927_201812_rgb_crop.png,TOFDC_split/Test/GT/20200927_201812_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_201812_depth_crop_fill.png
|
456 |
+
TOFDC_split/Test/RGB/20200920_165234_rgb_crop.png,TOFDC_split/Test/GT/20200920_165234_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_165234_depth_crop_fill.png
|
457 |
+
TOFDC_split/Test/RGB/20200820_110758_rgb_crop.png,TOFDC_split/Test/GT/20200820_110758_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_110758_depth_crop_fill.png
|
458 |
+
TOFDC_split/Test/RGB/20200927_141503_rgb_crop.png,TOFDC_split/Test/GT/20200927_141503_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_141503_depth_crop_fill.png
|
459 |
+
TOFDC_split/Test/RGB/2020_09_11_19_32_37_601_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_19_32_37_601_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_19_32_37_601_rgb_depth_crop_fill.png
|
460 |
+
TOFDC_split/Test/RGB/2020_09_14_23_27_00_069_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_14_23_27_00_069_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_14_23_27_00_069_rgb_depth_crop_fill.png
|
461 |
+
TOFDC_split/Test/RGB/2020_09_23_22_00_56_293_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_23_22_00_56_293_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_23_22_00_56_293_rgb_depth_crop_fill.png
|
462 |
+
TOFDC_split/Test/RGB/2020_09_13_19_39_13_040_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_13_19_39_13_040_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_13_19_39_13_040_rgb_depth_crop_fill.png
|
463 |
+
TOFDC_split/Test/RGB/20200927_194952_rgb_crop.png,TOFDC_split/Test/GT/20200927_194952_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_194952_depth_crop_fill.png
|
464 |
+
TOFDC_split/Test/RGB/2020_09_12_20_05_16_368_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_20_05_16_368_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_20_05_16_368_rgb_depth_crop_fill.png
|
465 |
+
TOFDC_split/Test/RGB/2020_09_10_22_56_54_090_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_22_56_54_090_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_22_56_54_090_rgb_depth_crop_fill.png
|
466 |
+
TOFDC_split/Test/RGB/20200718_193838_rgb_crop.png,TOFDC_split/Test/GT/20200718_193838_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_193838_depth_crop_fill.png
|
467 |
+
TOFDC_split/Test/RGB/20200927_193414_rgb_crop.png,TOFDC_split/Test/GT/20200927_193414_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_193414_depth_crop_fill.png
|
468 |
+
TOFDC_split/Test/RGB/20200819_203755_rgb_crop.png,TOFDC_split/Test/GT/20200819_203755_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_203755_depth_crop_fill.png
|
469 |
+
TOFDC_split/Test/RGB/20200927_191236_rgb_crop.png,TOFDC_split/Test/GT/20200927_191236_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_191236_depth_crop_fill.png
|
470 |
+
TOFDC_split/Test/RGB/20200819_103927_rgb_crop.png,TOFDC_split/Test/GT/20200819_103927_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_103927_depth_crop_fill.png
|
471 |
+
TOFDC_split/Test/RGB/20200718_201858_rgb_crop.png,TOFDC_split/Test/GT/20200718_201858_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_201858_depth_crop_fill.png
|
472 |
+
TOFDC_split/Test/RGB/20200919_104745_rgb_crop.png,TOFDC_split/Test/GT/20200919_104745_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_104745_depth_crop_fill.png
|
473 |
+
TOFDC_split/Test/RGB/20200818_113846_rgb_crop.png,TOFDC_split/Test/GT/20200818_113846_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_113846_depth_crop_fill.png
|
474 |
+
TOFDC_split/Test/RGB/20200928_150528_rgb_crop.png,TOFDC_split/Test/GT/20200928_150528_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_150528_depth_crop_fill.png
|
475 |
+
TOFDC_split/Test/RGB/20200603_195754_rgb_crop.png,TOFDC_split/Test/GT/20200603_195754_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_195754_depth_crop_fill.png
|
476 |
+
TOFDC_split/Test/RGB/20200607_092259_rgb_crop.png,TOFDC_split/Test/GT/20200607_092259_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200607_092259_depth_crop_fill.png
|
477 |
+
TOFDC_split/Test/RGB/20200725_154655_rgb_crop.png,TOFDC_split/Test/GT/20200725_154655_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_154655_depth_crop_fill.png
|
478 |
+
TOFDC_split/Test/RGB/20200919_155325_rgb_crop.png,TOFDC_split/Test/GT/20200919_155325_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_155325_depth_crop_fill.png
|
479 |
+
TOFDC_split/Test/RGB/20200919_195123_rgb_crop.png,TOFDC_split/Test/GT/20200919_195123_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_195123_depth_crop_fill.png
|
480 |
+
TOFDC_split/Test/RGB/2020_09_21_20_03_59_031_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_20_03_59_031_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_20_03_59_031_rgb_depth_crop_fill.png
|
481 |
+
TOFDC_split/Test/RGB/2020_09_10_22_34_50_072_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_22_34_50_072_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_22_34_50_072_rgb_depth_crop_fill.png
|
482 |
+
TOFDC_split/Test/RGB/2020_09_12_16_23_05_697_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_16_23_05_697_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_16_23_05_697_rgb_depth_crop_fill.png
|
483 |
+
TOFDC_split/Test/RGB/2020_09_26_10_44_05_488_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_10_44_05_488_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_10_44_05_488_rgb_depth_crop_fill.png
|
484 |
+
TOFDC_split/Test/RGB/2020_09_14_19_42_05_367_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_14_19_42_05_367_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_14_19_42_05_367_rgb_depth_crop_fill.png
|
485 |
+
TOFDC_split/Test/RGB/2020_09_14_19_34_21_962_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_14_19_34_21_962_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_14_19_34_21_962_rgb_depth_crop_fill.png
|
486 |
+
TOFDC_split/Test/RGB/2020_09_09_14_49_11_337_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_14_49_11_337_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_14_49_11_337_rgb_depth_crop_fill.png
|
487 |
+
TOFDC_split/Test/RGB/2020_09_09_21_00_29_568_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_21_00_29_568_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_21_00_29_568_rgb_depth_crop_fill.png
|
488 |
+
TOFDC_split/Test/RGB/2020_09_26_14_26_08_445_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_14_26_08_445_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_14_26_08_445_rgb_depth_crop_fill.png
|
489 |
+
TOFDC_split/Test/RGB/2020_09_09_20_58_26_019_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_20_58_26_019_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_20_58_26_019_rgb_depth_crop_fill.png
|
490 |
+
TOFDC_split/Test/RGB/2020_09_12_11_54_38_467_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_11_54_38_467_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_11_54_38_467_rgb_depth_crop_fill.png
|
491 |
+
TOFDC_split/Test/RGB/20200603_110751_rgb_crop.png,TOFDC_split/Test/GT/20200603_110751_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_110751_depth_crop_fill.png
|
492 |
+
TOFDC_split/Test/RGB/20200601_190016_rgb_crop.png,TOFDC_split/Test/GT/20200601_190016_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_190016_depth_crop_fill.png
|
493 |
+
TOFDC_split/Test/RGB/20200919_112001_rgb_crop.png,TOFDC_split/Test/GT/20200919_112001_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_112001_depth_crop_fill.png
|
494 |
+
TOFDC_split/Test/RGB/20200602_100348_rgb_crop.png,TOFDC_split/Test/GT/20200602_100348_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200602_100348_depth_crop_fill.png
|
495 |
+
TOFDC_split/Test/RGB/20200604_164300_rgb_crop.png,TOFDC_split/Test/GT/20200604_164300_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200604_164300_depth_crop_fill.png
|
496 |
+
TOFDC_split/Test/RGB/20200927_200211_rgb_crop.png,TOFDC_split/Test/GT/20200927_200211_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_200211_depth_crop_fill.png
|
497 |
+
TOFDC_split/Test/RGB/2020_09_09_21_25_08_609_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_21_25_08_609_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_21_25_08_609_rgb_depth_crop_fill.png
|
498 |
+
TOFDC_split/Test/RGB/2020_09_22_10_58_03_795_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_10_58_03_795_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_10_58_03_795_rgb_depth_crop_fill.png
|
499 |
+
TOFDC_split/Test/RGB/2020_09_11_17_27_16_826_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_17_27_16_826_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_17_27_16_826_rgb_depth_crop_fill.png
|
500 |
+
TOFDC_split/Test/RGB/20200927_155151_rgb_crop.png,TOFDC_split/Test/GT/20200927_155151_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_155151_depth_crop_fill.png
|
501 |
+
TOFDC_split/Test/RGB/20200818_213742_rgb_crop.png,TOFDC_split/Test/GT/20200818_213742_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_213742_depth_crop_fill.png
|
502 |
+
TOFDC_split/Test/RGB/20200716_095332_rgb_crop.png,TOFDC_split/Test/GT/20200716_095332_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200716_095332_depth_crop_fill.png
|
503 |
+
TOFDC_split/Test/RGB/20200820_165419_rgb_crop.png,TOFDC_split/Test/GT/20200820_165419_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200820_165419_depth_crop_fill.png
|
504 |
+
TOFDC_split/Test/RGB/20200818_152524_rgb_crop.png,TOFDC_split/Test/GT/20200818_152524_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_152524_depth_crop_fill.png
|
505 |
+
TOFDC_split/Test/RGB/20200721_144926_rgb_crop.png,TOFDC_split/Test/GT/20200721_144926_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_144926_depth_crop_fill.png
|
506 |
+
TOFDC_split/Test/RGB/2020_09_12_20_42_13_700_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_20_42_13_700_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_20_42_13_700_rgb_depth_crop_fill.png
|
507 |
+
TOFDC_split/Test/RGB/20200927_204750_rgb_crop.png,TOFDC_split/Test/GT/20200927_204750_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_204750_depth_crop_fill.png
|
508 |
+
TOFDC_split/Test/RGB/20200928_153521_rgb_crop.png,TOFDC_split/Test/GT/20200928_153521_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200928_153521_depth_crop_fill.png
|
509 |
+
TOFDC_split/Test/RGB/20200727_170228_rgb_crop.png,TOFDC_split/Test/GT/20200727_170228_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200727_170228_depth_crop_fill.png
|
510 |
+
TOFDC_split/Test/RGB/20200927_192222_rgb_crop.png,TOFDC_split/Test/GT/20200927_192222_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_192222_depth_crop_fill.png
|
511 |
+
TOFDC_split/Test/RGB/2020_09_22_10_15_20_018_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_10_15_20_018_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_10_15_20_018_rgb_depth_crop_fill.png
|
512 |
+
TOFDC_split/Test/RGB/20200727_173216_rgb_crop.png,TOFDC_split/Test/GT/20200727_173216_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200727_173216_depth_crop_fill.png
|
513 |
+
TOFDC_split/Test/RGB/20200725_154516_rgb_crop.png,TOFDC_split/Test/GT/20200725_154516_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_154516_depth_crop_fill.png
|
514 |
+
TOFDC_split/Test/RGB/2020_09_12_20_31_01_219_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_20_31_01_219_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_20_31_01_219_rgb_depth_crop_fill.png
|
515 |
+
TOFDC_split/Test/RGB/20200819_104229_rgb_crop.png,TOFDC_split/Test/GT/20200819_104229_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_104229_depth_crop_fill.png
|
516 |
+
TOFDC_split/Test/RGB/20200920_170926_rgb_crop.png,TOFDC_split/Test/GT/20200920_170926_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_170926_depth_crop_fill.png
|
517 |
+
TOFDC_split/Test/RGB/20200903_160917_rgb_crop.png,TOFDC_split/Test/GT/20200903_160917_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200903_160917_depth_crop_fill.png
|
518 |
+
TOFDC_split/Test/RGB/20200819_201906_rgb_crop.png,TOFDC_split/Test/GT/20200819_201906_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200819_201906_depth_crop_fill.png
|
519 |
+
TOFDC_split/Test/RGB/20200717_193107_rgb_crop.png,TOFDC_split/Test/GT/20200717_193107_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_193107_depth_crop_fill.png
|
520 |
+
TOFDC_split/Test/RGB/20200719_094105_rgb_crop.png,TOFDC_split/Test/GT/20200719_094105_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200719_094105_depth_crop_fill.png
|
521 |
+
TOFDC_split/Test/RGB/20200927_202626_rgb_crop.png,TOFDC_split/Test/GT/20200927_202626_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_202626_depth_crop_fill.png
|
522 |
+
TOFDC_split/Test/RGB/2020_09_08_11_02_59_336_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_11_02_59_336_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_11_02_59_336_rgb_depth_crop_fill.png
|
523 |
+
TOFDC_split/Test/RGB/2020_09_08_17_44_15_116_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_17_44_15_116_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_17_44_15_116_rgb_depth_crop_fill.png
|
524 |
+
TOFDC_split/Test/RGB/2020_09_12_20_47_27_409_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_20_47_27_409_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_20_47_27_409_rgb_depth_crop_fill.png
|
525 |
+
TOFDC_split/Test/RGB/2020_09_11_21_56_01_588_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_11_21_56_01_588_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_11_21_56_01_588_rgb_depth_crop_fill.png
|
526 |
+
TOFDC_split/Test/RGB/2020_09_10_15_34_37_293_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_10_15_34_37_293_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_10_15_34_37_293_rgb_depth_crop_fill.png
|
527 |
+
TOFDC_split/Test/RGB/2020_09_26_15_03_55_533_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_26_15_03_55_533_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_26_15_03_55_533_rgb_depth_crop_fill.png
|
528 |
+
TOFDC_split/Test/RGB/2020_09_09_21_18_08_414_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_21_18_08_414_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_21_18_08_414_rgb_depth_crop_fill.png
|
529 |
+
TOFDC_split/Test/RGB/2020_09_12_15_38_42_237_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_15_38_42_237_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_15_38_42_237_rgb_depth_crop_fill.png
|
530 |
+
TOFDC_split/Test/RGB/2020_09_12_15_17_16_617_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_15_17_16_617_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_15_17_16_617_rgb_depth_crop_fill.png
|
531 |
+
TOFDC_split/Test/RGB/20200818_215847_rgb_crop.png,TOFDC_split/Test/GT/20200818_215847_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_215847_depth_crop_fill.png
|
532 |
+
TOFDC_split/Test/RGB/20200818_145458_rgb_crop.png,TOFDC_split/Test/GT/20200818_145458_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_145458_depth_crop_fill.png
|
533 |
+
TOFDC_split/Test/RGB/2020_09_12_11_06_14_851_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_12_11_06_14_851_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_12_11_06_14_851_rgb_depth_crop_fill.png
|
534 |
+
TOFDC_split/Test/RGB/2020_09_13_19_13_22_503_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_13_19_13_22_503_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_13_19_13_22_503_rgb_depth_crop_fill.png
|
535 |
+
TOFDC_split/Test/RGB/20200927_154637_rgb_crop.png,TOFDC_split/Test/GT/20200927_154637_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_154637_depth_crop_fill.png
|
536 |
+
TOFDC_split/Test/RGB/20200919_191743_rgb_crop.png,TOFDC_split/Test/GT/20200919_191743_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200919_191743_depth_crop_fill.png
|
537 |
+
TOFDC_split/Test/RGB/20200824_165648_rgb_crop.png,TOFDC_split/Test/GT/20200824_165648_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200824_165648_depth_crop_fill.png
|
538 |
+
TOFDC_split/Test/RGB/20200927_195308_rgb_crop.png,TOFDC_split/Test/GT/20200927_195308_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200927_195308_depth_crop_fill.png
|
539 |
+
TOFDC_split/Test/RGB/2020_09_09_11_00_54_989_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_11_00_54_989_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_11_00_54_989_rgb_depth_crop_fill.png
|
540 |
+
TOFDC_split/Test/RGB/2020_09_21_20_29_27_975_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_21_20_29_27_975_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_21_20_29_27_975_rgb_depth_crop_fill.png
|
541 |
+
TOFDC_split/Test/RGB/20200723_105307_rgb_crop.png,TOFDC_split/Test/GT/20200723_105307_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200723_105307_depth_crop_fill.png
|
542 |
+
TOFDC_split/Test/RGB/20200720_154423_rgb_crop.png,TOFDC_split/Test/GT/20200720_154423_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200720_154423_depth_crop_fill.png
|
543 |
+
TOFDC_split/Test/RGB/2020_09_08_17_22_53_558_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_17_22_53_558_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_17_22_53_558_rgb_depth_crop_fill.png
|
544 |
+
TOFDC_split/Test/RGB/2020_09_22_16_18_39_356_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_22_16_18_39_356_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_22_16_18_39_356_rgb_depth_crop_fill.png
|
545 |
+
TOFDC_split/Test/RGB/20200818_203330_rgb_crop.png,TOFDC_split/Test/GT/20200818_203330_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_203330_depth_crop_fill.png
|
546 |
+
TOFDC_split/Test/RGB/20200717_105043_rgb_crop.png,TOFDC_split/Test/GT/20200717_105043_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_105043_depth_crop_fill.png
|
547 |
+
TOFDC_split/Test/RGB/20200920_112805_rgb_crop.png,TOFDC_split/Test/GT/20200920_112805_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200920_112805_depth_crop_fill.png
|
548 |
+
TOFDC_split/Test/RGB/20200723_110356_rgb_crop.png,TOFDC_split/Test/GT/20200723_110356_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200723_110356_depth_crop_fill.png
|
549 |
+
TOFDC_split/Test/RGB/20200601_171237_rgb_crop.png,TOFDC_split/Test/GT/20200601_171237_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200601_171237_depth_crop_fill.png
|
550 |
+
TOFDC_split/Test/RGB/2020_09_09_15_15_11_689_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_09_15_15_11_689_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_09_15_15_11_689_rgb_depth_crop_fill.png
|
551 |
+
TOFDC_split/Test/RGB/20200717_161032_rgb_crop.png,TOFDC_split/Test/GT/20200717_161032_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200717_161032_depth_crop_fill.png
|
552 |
+
TOFDC_split/Test/RGB/20200725_163031_rgb_crop.png,TOFDC_split/Test/GT/20200725_163031_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_163031_depth_crop_fill.png
|
553 |
+
TOFDC_split/Test/RGB/20200718_100632_rgb_crop.png,TOFDC_split/Test/GT/20200718_100632_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200718_100632_depth_crop_fill.png
|
554 |
+
TOFDC_split/Test/RGB/20200721_152622_rgb_crop.png,TOFDC_split/Test/GT/20200721_152622_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_152622_depth_crop_fill.png
|
555 |
+
TOFDC_split/Test/RGB/2020_09_08_16_24_37_249_rgb_rgb_crop.png,TOFDC_split/Test/GT/2020_09_08_16_24_37_249_rgb_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/2020_09_08_16_24_37_249_rgb_depth_crop_fill.png
|
556 |
+
TOFDC_split/Test/RGB/20200818_203600_rgb_crop.png,TOFDC_split/Test/GT/20200818_203600_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200818_203600_depth_crop_fill.png
|
557 |
+
TOFDC_split/Test/RGB/20200721_164403_rgb_crop.png,TOFDC_split/Test/GT/20200721_164403_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200721_164403_depth_crop_fill.png
|
558 |
+
TOFDC_split/Test/RGB/20200716_201254_rgb_crop.png,TOFDC_split/Test/GT/20200716_201254_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200716_201254_depth_crop_fill.png
|
559 |
+
TOFDC_split/Test/RGB/20200725_164021_rgb_crop.png,TOFDC_split/Test/GT/20200725_164021_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200725_164021_depth_crop_fill.png
|
560 |
+
TOFDC_split/Test/RGB/20200603_202348_rgb_crop.png,TOFDC_split/Test/GT/20200603_202348_tgv_gt_crop.png,TOFDC_split/Test/LR_Filled/20200603_202348_depth_crop_fill.png
|
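Note: every row of this split file is a comma-separated triple `RGB,GT,LR_Filled`, with paths resolved against the dataset root by the TOFDSR dataloader further below. A minimal parsing sketch (the helper `parse_split_line` and the `./dataset/` root are illustrative assumptions, not part of the repo):

```python
# Minimal sketch: parse one line of data/TOFDSR_Test.txt.
# Each line holds three comma-separated paths: RGB image, GT depth, filled LR depth.
import os

def parse_split_line(line, root_dir="./dataset/"):
    rgb_rel, gt_rel, lr_rel = line.strip().split(",")
    return (os.path.join(root_dir, rgb_rel),
            os.path.join(root_dir, gt_rel),
            os.path.join(root_dir, lr_rel))

with open("data/TOFDSR_Test.txt") as f:
    rgb_path, gt_path, lr_path = parse_split_line(f.readline())
```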
data/TOFDSR_Train.txt
ADDED
The diff for this file is too large to render.
See raw diff
|
|
data/__pycache__/nyu_dataloader.cpython-311.pyc
ADDED
Binary file (3.04 kB). View file
|
|
data/__pycache__/rgbdd_dataloader.cpython-311.pyc
ADDED
Binary file (7.64 kB). View file
|
|
data/nyu_dataloader.py
ADDED
@@ -0,0 +1,47 @@
1 |
+
from torch.utils.data import Dataset
|
2 |
+
from PIL import Image
|
3 |
+
import numpy as np
|
4 |
+
|
5 |
+
|
6 |
+
class NYU_v2_datset(Dataset):
|
7 |
+
"""NYUDataset."""
|
8 |
+
|
9 |
+
def __init__(self, root_dir, scale=8, train=True, transform=None):
|
10 |
+
"""
|
11 |
+
Args:
|
12 |
+
root_dir (string): Directory with all the images.
|
13 |
+
scale (float): dataset scale
|
14 |
+
train (bool): train or test
|
15 |
+
transform (callable, optional): Optional transform to be applied on a sample.
|
16 |
+
|
17 |
+
"""
|
18 |
+
self.root_dir = root_dir
|
19 |
+
self.transform = transform
|
20 |
+
self.scale = scale
|
21 |
+
self.train = train
|
22 |
+
|
23 |
+
if train:
|
24 |
+
self.depths = np.load('%s/train_depth_split.npy' % root_dir)
|
25 |
+
self.images = np.load('%s/train_images_split.npy' % root_dir)
|
26 |
+
else:
|
27 |
+
self.depths = np.load('%s/test_depth.npy' % root_dir)
|
28 |
+
self.images = np.load('%s/test_images_v2.npy' % root_dir)
|
29 |
+
|
30 |
+
def __len__(self):
|
31 |
+
return self.depths.shape[0]
|
32 |
+
|
33 |
+
def __getitem__(self, idx):
|
34 |
+
depth = self.depths[idx]
|
35 |
+
image = self.images[idx]
|
36 |
+
h, w = depth.shape[:2]
|
37 |
+
s = self.scale
|
38 |
+
lr = np.array(Image.fromarray(depth.squeeze()).resize((w // s, h // s), Image.BICUBIC).resize((w, h), Image.BICUBIC))
|
39 |
+
|
40 |
+
if self.transform:
|
41 |
+
image = self.transform(image).float()
|
42 |
+
depth = self.transform(depth).float()
|
43 |
+
lr = self.transform(np.expand_dims(lr, 2)).float()
|
44 |
+
|
45 |
+
sample = {'guidance': image, 'lr': lr, 'gt': depth}
|
46 |
+
|
47 |
+
return sample
|
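A usage sketch for this dataset, assuming the NYU-v2 `.npy` splits are present under the chosen `root_dir` (the path, batch size, and ToTensor transform are assumptions, not repo defaults):

```python
# Sketch: wrap NYU_v2_datset in a DataLoader.
# NYU_v2_datset expects train_depth_split.npy / train_images_split.npy under root_dir.
from torch.utils.data import DataLoader
from torchvision import transforms
from data.nyu_dataloader import NYU_v2_datset

dataset = NYU_v2_datset(root_dir='./dataset/NYU_v2', scale=8, train=True,
                        transform=transforms.Compose([transforms.ToTensor()]))
loader = DataLoader(dataset, batch_size=1, shuffle=True, num_workers=4)

for batch in loader:
    guidance, lr, gt = batch['guidance'], batch['lr'], batch['gt']  # RGB, LR depth, GT depth
    break
```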
data/rgbdd_dataloader.py
ADDED
@@ -0,0 +1,118 @@
1 |
+
import numpy as np
|
2 |
+
import os
|
3 |
+
|
4 |
+
from torch.utils.data import Dataset
|
5 |
+
from PIL import Image
|
6 |
+
from scipy.ndimage import gaussian_filter
|
7 |
+
|
8 |
+
|
9 |
+
class RGBDD_Dataset(Dataset):
|
10 |
+
"""RGB-D-D Dataset."""
|
11 |
+
|
12 |
+
def __init__(self, root_dir="./dataset/RGB-D-D/", scale=4, downsample='real', train=True,
|
13 |
+
transform=None, isNoisy=False, blur_sigma=1.2):
|
14 |
+
|
15 |
+
self.root_dir = root_dir
|
16 |
+
self.transform = transform
|
17 |
+
self.scale = scale
|
18 |
+
self.downsample = downsample
|
19 |
+
self.train = train
|
20 |
+
self.isNoisy = isNoisy
|
21 |
+
self.blur_sigma = blur_sigma
|
22 |
+
|
23 |
+
types = ['models', 'plants', 'portraits']
|
24 |
+
|
25 |
+
if train:
|
26 |
+
if self.downsample == 'real':
|
27 |
+
self.GTs = []
|
28 |
+
self.LRs = []
|
29 |
+
self.RGBs = []
|
30 |
+
for type in types:
|
31 |
+
list_dir = os.listdir('%s/%s/%s_train'% (root_dir, type, type))
|
32 |
+
for n in list_dir:
|
33 |
+
self.RGBs.append('%s/%s/%s_train/%s/%s_RGB.jpg' % (root_dir, type, type, n, n))
|
34 |
+
self.GTs.append('%s/%s/%s_train/%s/%s_HR_gt.png' % (root_dir, type, type, n, n))
|
35 |
+
self.LRs.append('%s/%s/%s_train/%s/%s_LR_fill_depth.png' % (root_dir, type, type, n, n))
|
36 |
+
else:
|
37 |
+
self.GTs = []
|
38 |
+
self.RGBs = []
|
39 |
+
for type in types:
|
40 |
+
list_dir = os.listdir('%s/%s/%s_train'% (root_dir, type, type))
|
41 |
+
for n in list_dir:
|
42 |
+
self.RGBs.append('%s/%s/%s_train/%s/%s_RGB.jpg' % (root_dir, type, type, n, n))
|
43 |
+
self.GTs.append('%s/%s/%s_train/%s/%s_HR_gt.png' % (root_dir, type, type, n, n))
|
44 |
+
|
45 |
+
else:
|
46 |
+
if self.downsample == 'real':
|
47 |
+
self.GTs = []
|
48 |
+
self.LRs = []
|
49 |
+
self.RGBs = []
|
50 |
+
for type in types:
|
51 |
+
list_dir = os.listdir('%s/%s/%s_test'% (root_dir, type, type))
|
52 |
+
for n in list_dir:
|
53 |
+
self.RGBs.append('%s/%s/%s_test/%s/%s_RGB.jpg' % (root_dir, type, type, n, n))
|
54 |
+
self.GTs.append('%s/%s/%s_test/%s/%s_HR_gt.png' % (root_dir, type, type, n, n))
|
55 |
+
self.LRs.append('%s/%s/%s_test/%s/%s_LR_fill_depth.png' % (root_dir, type, type, n, n))
|
56 |
+
else:
|
57 |
+
self.GTs = []
|
58 |
+
self.RGBs = []
|
59 |
+
for type in types:
|
60 |
+
list_dir = os.listdir('%s/%s/%s_test'% (root_dir, type, type))
|
61 |
+
for n in list_dir:
|
62 |
+
self.RGBs.append('%s/%s/%s_test/%s/%s_RGB.jpg' % (root_dir, type, type, n, n))
|
63 |
+
self.GTs.append('%s/%s/%s_test/%s/%s_HR_gt.png' % (root_dir, type, type, n, n))
|
64 |
+
|
65 |
+
def __len__(self):
|
66 |
+
return len(self.GTs)
|
67 |
+
|
68 |
+
def __getitem__(self, idx):
|
69 |
+
if self.downsample == 'real':
|
70 |
+
image = np.array(Image.open(self.RGBs[idx]).convert("RGB")).astype(np.float32)
|
71 |
+
name = self.RGBs[idx][-22:-8]
|
72 |
+
gt = np.array(Image.open(self.GTs[idx])).astype(np.float32)
|
73 |
+
h, w = gt.shape
|
74 |
+
lr = np.array(Image.open(self.LRs[idx]).resize((w, h), Image.BICUBIC)).astype(np.float32)
|
75 |
+
else:
|
76 |
+
image = Image.open(self.RGBs[idx]).convert("RGB")
|
77 |
+
name = self.RGBs[idx][-22:-8]
|
78 |
+
image = np.array(image).astype(np.float32)
|
79 |
+
gt = Image.open(self.GTs[idx])
|
80 |
+
w, h = gt.size
|
81 |
+
s = self.scale
|
82 |
+
lr = np.array(gt.resize((w // s, h // s), Image.BICUBIC).resize((w, h), Image.BICUBIC)).astype(np.float32)
|
83 |
+
gt = np.array(gt).astype(np.float32)
|
84 |
+
|
85 |
+
# normalization
|
86 |
+
if self.train:
|
87 |
+
max_out = 5000.0
|
88 |
+
min_out = 0.0
|
89 |
+
lr = (lr - min_out) / (max_out - min_out)
|
90 |
+
gt = (gt-min_out)/(max_out-min_out)
|
91 |
+
else:
|
92 |
+
max_out = 5000.0
|
93 |
+
min_out = 0.0
|
94 |
+
lr = (lr - min_out) / (max_out - min_out)
|
95 |
+
|
96 |
+
maxx = np.max(image)
|
97 |
+
minn = np.min(image)
|
98 |
+
image = (image - minn) / (maxx - minn)
|
99 |
+
|
100 |
+
lr_minn = np.min(lr)
|
101 |
+
lr_maxx = np.max(lr)
|
102 |
+
|
103 |
+
if not self.train:
|
104 |
+
np.random.seed(42)
|
105 |
+
|
106 |
+
if self.isNoisy:
|
107 |
+
lr = gaussian_filter(lr, sigma=self.blur_sigma)
|
108 |
+
|
109 |
+
gaussian_noise = np.random.normal(0, 0.07, lr.shape)
|
110 |
+
lr = lr + gaussian_noise
|
111 |
+
lr = np.clip(lr, lr_minn, lr_maxx)
|
112 |
+
|
113 |
+
image = self.transform(image).float()
|
114 |
+
gt = self.transform(np.expand_dims(gt, 2)).float()
|
115 |
+
lr = self.transform(np.expand_dims(lr, 2)).float()
|
116 |
+
sample = {'guidance': image, 'lr': lr, 'gt': gt, 'max': max_out, 'min': min_out, 'name':name}
|
117 |
+
|
118 |
+
return sample
|
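The `isNoisy` branch above blurs the normalized LR depth, adds zero-mean Gaussian noise, and clips the result back to the original LR value range. A standalone sketch of that degradation, using the defaults from this file (the function name and the random test input are illustrative only):

```python
# Standalone sketch of the isNoisy degradation: blur the normalized LR depth,
# add zero-mean Gaussian noise, then clip back to the original LR value range.
import numpy as np
from scipy.ndimage import gaussian_filter

def degrade_lr(lr, blur_sigma=1.2, noise_std=0.07):
    lo, hi = lr.min(), lr.max()          # range recorded before degradation
    lr = gaussian_filter(lr, sigma=blur_sigma)
    lr = lr + np.random.normal(0, noise_std, lr.shape)
    return np.clip(lr, lo, hi)

noisy = degrade_lr(np.random.rand(256, 256).astype(np.float32))
```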
data/tofdc_dataloader.py
ADDED
@@ -0,0 +1,90 @@
1 |
+
import numpy as np
|
2 |
+
import os
|
3 |
+
|
4 |
+
from torch.utils.data import Dataset, DataLoader
|
5 |
+
from PIL import Image
|
6 |
+
from scipy.ndimage import gaussian_filter
|
7 |
+
|
8 |
+
|
9 |
+
class TOFDSR_Dataset(Dataset):
|
10 |
+
|
11 |
+
def __init__(self, root_dir="./dataset/", scale=4, downsample='real', train=True, txt_file='./TOFDSR_Train.txt',
|
12 |
+
transform=None, isNoisy=False, blur_sigma=1.2):
|
13 |
+
|
14 |
+
self.root_dir = root_dir
|
15 |
+
self.transform = transform
|
16 |
+
self.scale = scale
|
17 |
+
self.downsample = downsample
|
18 |
+
self.train = train
|
19 |
+
self.isNoisy = isNoisy
|
20 |
+
self.blur_sigma = blur_sigma
|
21 |
+
self.image_list = txt_file
|
22 |
+
|
23 |
+
with open(self.image_list, 'r') as f:
|
24 |
+
self.filename = f.readlines()
|
25 |
+
|
26 |
+
def __len__(self):
|
27 |
+
return len(self.filename)
|
28 |
+
|
29 |
+
def __getitem__(self, idx):
|
30 |
+
|
31 |
+
sample_path = self.filename[idx].strip('\n')
|
32 |
+
sample_path_ = sample_path.split(',')
|
33 |
+
rgb_path = sample_path_[0]
|
34 |
+
gt_path = sample_path_[1]
|
35 |
+
lr_path = sample_path_[2]
|
36 |
+
name = gt_path[20:-4]
|
37 |
+
|
38 |
+
rgb_path = os.path.join(self.root_dir, rgb_path)
|
39 |
+
gt_path = os.path.join(self.root_dir, gt_path)
|
40 |
+
lr_path = os.path.join(self.root_dir, lr_path)
|
41 |
+
|
42 |
+
if self.downsample == 'real':
|
43 |
+
image = np.array(Image.open(rgb_path).convert("RGB")).astype(np.float32)
|
44 |
+
gt = np.array(Image.open(gt_path)).astype(np.float32)
|
45 |
+
h, w = gt.shape
|
46 |
+
lr = np.array(Image.open(lr_path).resize((w, h), Image.BICUBIC)).astype(np.float32)
|
47 |
+
|
48 |
+
else:
|
49 |
+
image = np.array(Image.open(rgb_path).convert("RGB")).astype(np.float32)
|
50 |
+
gt = Image.open(gt_path)
|
51 |
+
w, h = gt.size
|
52 |
+
lr = np.array(gt.resize((w // self.scale, h // self.scale), Image.BICUBIC).resize((w, h), Image.BICUBIC)).astype(np.float32)
|
53 |
+
gt = np.array(gt).astype(np.float32)
|
54 |
+
|
55 |
+
image_max = np.max(image)
|
56 |
+
image_min = np.min(image)
|
57 |
+
image = (image - image_min) / (image_max - image_min)
|
58 |
+
|
59 |
+
# normalization
|
60 |
+
if self.train:
|
61 |
+
max_out = 5000.0
|
62 |
+
min_out = 0.0
|
63 |
+
lr = (lr - min_out) / (max_out - min_out)
|
64 |
+
gt = (gt-min_out)/(max_out-min_out)
|
65 |
+
else:
|
66 |
+
max_out = 5000.0
|
67 |
+
min_out = 0.0
|
68 |
+
lr = (lr - min_out) / (max_out - min_out)
|
69 |
+
|
70 |
+
lr_minn = np.min(lr)
|
71 |
+
lr_maxx = np.max(lr)
|
72 |
+
|
73 |
+
if not self.train:
|
74 |
+
np.random.seed(42)
|
75 |
+
|
76 |
+
if self.isNoisy:
|
77 |
+
lr = gaussian_filter(lr, sigma=self.blur_sigma)
|
78 |
+
|
79 |
+
gaussian_noise = np.random.normal(0, 0.07, lr.shape)
|
80 |
+
lr = lr + gaussian_noise
|
81 |
+
lr = np.clip(lr, lr_minn, lr_maxx)
|
82 |
+
|
83 |
+
if self.transform:
|
84 |
+
image = self.transform(image).float()
|
85 |
+
gt = self.transform(np.expand_dims(gt, 2)).float()
|
86 |
+
lr = self.transform(np.expand_dims(lr, 2)).float()
|
87 |
+
|
88 |
+
sample = {'guidance': image, 'lr': lr, 'gt': gt, 'max': max_out, 'min': min_out,'name': name}
|
89 |
+
|
90 |
+
return sample
|
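A usage sketch for the TOFDSR dataset driven by the split file shown earlier (the `root_dir` value is an assumption; the split file ships in `data/`):

```python
# Sketch: load the TOFDSR test split with TOFDSR_Dataset.
from torch.utils.data import DataLoader
from torchvision import transforms
from data.tofdc_dataloader import TOFDSR_Dataset

test_set = TOFDSR_Dataset(root_dir='./dataset/', train=False, downsample='real',
                          txt_file='./data/TOFDSR_Test.txt', isNoisy=False,
                          transform=transforms.Compose([transforms.ToTensor()]))
loader = DataLoader(test_set, batch_size=1, shuffle=False)
sample = next(iter(loader))  # keys: 'guidance', 'lr', 'gt', 'max', 'min', 'name'
```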
examples/RGB-D-D/20200518160957_LR_fill_depth.png
ADDED
![]() |
examples/RGB-D-D/20200518160957_RGB.jpg
ADDED
![]() |
examples/TOFDSR/2020_09_08_13_59_59_435_rgb_depth_crop_fill.png
ADDED
![]() |
examples/TOFDSR/2020_09_08_13_59_59_435_rgb_rgb_crop.png
ADDED
![]() |
Git LFS Details
|
net/CR.py
ADDED
@@ -0,0 +1,63 @@
1 |
+
import torch.nn as nn
|
2 |
+
import torch
|
3 |
+
from torchvision import models
|
4 |
+
|
5 |
+
class Vgg19(torch.nn.Module):
|
6 |
+
def __init__(self, requires_grad=False):
|
7 |
+
super(Vgg19, self).__init__()
|
8 |
+
vgg_pretrained_features = models.vgg19(pretrained=True).features
|
9 |
+
self.slice1 = torch.nn.Sequential()
|
10 |
+
self.slice2 = torch.nn.Sequential()
|
11 |
+
self.slice3 = torch.nn.Sequential()
|
12 |
+
self.slice4 = torch.nn.Sequential()
|
13 |
+
self.slice5 = torch.nn.Sequential()
|
14 |
+
for x in range(2):
|
15 |
+
self.slice1.add_module(str(x), vgg_pretrained_features[x])
|
16 |
+
for x in range(2, 7):
|
17 |
+
self.slice2.add_module(str(x), vgg_pretrained_features[x])
|
18 |
+
for x in range(7, 12):
|
19 |
+
self.slice3.add_module(str(x), vgg_pretrained_features[x])
|
20 |
+
for x in range(12, 21):
|
21 |
+
self.slice4.add_module(str(x), vgg_pretrained_features[x])
|
22 |
+
for x in range(21, 30):
|
23 |
+
self.slice5.add_module(str(x), vgg_pretrained_features[x])
|
24 |
+
if not requires_grad:
|
25 |
+
for param in self.parameters():
|
26 |
+
param.requires_grad = False
|
27 |
+
|
28 |
+
def forward(self, X):
|
29 |
+
h_relu1 = self.slice1(X)
|
30 |
+
h_relu2 = self.slice2(h_relu1)
|
31 |
+
h_relu3 = self.slice3(h_relu2)
|
32 |
+
h_relu4 = self.slice4(h_relu3)
|
33 |
+
h_relu5 = self.slice5(h_relu4)
|
34 |
+
return [h_relu1, h_relu2, h_relu3, h_relu4, h_relu5]
|
35 |
+
|
36 |
+
class ContrastLoss(nn.Module):
|
37 |
+
def __init__(self, ablation=False):
|
38 |
+
|
39 |
+
super(ContrastLoss, self).__init__()
|
40 |
+
self.vgg = Vgg19().cuda()
|
41 |
+
self.l1 = nn.L1Loss()
|
42 |
+
self.weights = [1.0/32, 1.0/16, 1.0/8, 1.0/4, 1.0]
|
43 |
+
self.ab = ablation
|
44 |
+
|
45 |
+
def forward(self, a, p, n):
|
46 |
+
|
47 |
+
a_re = a.repeat(1, 3, 1, 1)
|
48 |
+
p_re = p.repeat(1, 3, 1, 1)
|
49 |
+
n_re = n.repeat(1, 3, 1, 1)
|
50 |
+
a_vgg, p_vgg, n_vgg = self.vgg(a_re), self.vgg(p_re), self.vgg(n_re)
|
51 |
+
loss = 0
|
52 |
+
|
53 |
+
d_ap, d_an = 0, 0
|
54 |
+
for i in range(len(a_vgg)):
|
55 |
+
d_ap = self.l1(a_vgg[i], p_vgg[i].detach())
|
56 |
+
if not self.ab:
|
57 |
+
d_an = self.l1(a_vgg[i], n_vgg[i].detach())
|
58 |
+
contrastive = d_ap / (d_an + 1e-7)
|
59 |
+
else:
|
60 |
+
contrastive = d_ap
|
61 |
+
|
62 |
+
loss += self.weights[i] * contrastive
|
63 |
+
return loss
|
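A call sketch for `ContrastLoss` (it moves VGG-19 onto `.cuda()`, so a CUDA device is required). The anchor/positive/negative pairing shown is one plausible assignment for depth SR, with the prediction as anchor, GT as positive, and the degraded input as negative; the training scripts define the actual pairing used in this repo:

```python
# Call sketch for the VGG-based contrastive loss (assumes a CUDA device).
import torch
from net.CR import ContrastLoss

cr_loss = ContrastLoss(ablation=False)
pred = torch.rand(2, 1, 64, 64, device='cuda')  # anchor (e.g. predicted depth)
gt   = torch.rand(2, 1, 64, 64, device='cuda')  # positive (e.g. ground truth)
lr   = torch.rand(2, 1, 64, 64, device='cuda')  # negative (e.g. degraded input)
loss = cr_loss(pred, gt, lr)
```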
net/__pycache__/CR.cpython-311.pyc
ADDED
Binary file (5.26 kB). View file
|
|
net/__pycache__/deform_conv.cpython-311.pyc
ADDED
Binary file (5.2 kB). View file
|
|
net/__pycache__/dornet.cpython-311.pyc
ADDED
Binary file (38.3 kB). View file
|
|
net/__pycache__/dornet_ddp.cpython-311.pyc
ADDED
Binary file (39.2 kB). View file
|
|
net/deform_conv.py
ADDED
@@ -0,0 +1,75 @@
1 |
+
import math
|
2 |
+
|
3 |
+
import torch
|
4 |
+
import torch.nn as nn
|
5 |
+
from torch.nn.modules.utils import _pair
|
6 |
+
from mmcv.ops import modulated_deform_conv2d
|
7 |
+
|
8 |
+
class DCN_layer_rgb(nn.Module):
|
9 |
+
def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1,
|
10 |
+
groups=1, deformable_groups=1, bias=True, extra_offset_mask=True):
|
11 |
+
super(DCN_layer_rgb, self).__init__()
|
12 |
+
self.in_channels = in_channels
|
13 |
+
self.out_channels = out_channels
|
14 |
+
self.kernel_size = _pair(kernel_size)
|
15 |
+
self.stride = stride
|
16 |
+
self.padding = padding
|
17 |
+
self.dilation = dilation
|
18 |
+
self.groups = groups
|
19 |
+
self.deformable_groups = deformable_groups
|
20 |
+
self.with_bias = bias
|
21 |
+
|
22 |
+
self.weight = nn.Parameter(
|
23 |
+
torch.Tensor(out_channels, in_channels, *self.kernel_size))
|
24 |
+
|
25 |
+
self.extra_offset_mask = extra_offset_mask
|
26 |
+
self.conv_offset_mask = nn.Conv2d(
|
27 |
+
self.in_channels,
|
28 |
+
self.deformable_groups * 3 * self.kernel_size[0] * self.kernel_size[1],
|
29 |
+
kernel_size=self.kernel_size, stride=_pair(self.stride), padding=_pair(self.padding),
|
30 |
+
bias=True
|
31 |
+
)
|
32 |
+
|
33 |
+
self.c1 = nn.Conv2d(in_channels*4, out_channels, 1, 1, 0, bias=False)
|
34 |
+
self.c2 = nn.Conv2d(out_channels, out_channels, 1, 1, 0, bias=False)
|
35 |
+
|
36 |
+
if bias:
|
37 |
+
self.bias = nn.Parameter(torch.Tensor(out_channels))
|
38 |
+
else:
|
39 |
+
self.register_parameter('bias', None)
|
40 |
+
|
41 |
+
self.init_offset()
|
42 |
+
self.reset_parameters()
|
43 |
+
|
44 |
+
def reset_parameters(self):
|
45 |
+
n = self.in_channels
|
46 |
+
for k in self.kernel_size:
|
47 |
+
n *= k
|
48 |
+
stdv = 1. / math.sqrt(n)
|
49 |
+
self.weight.data.uniform_(-stdv, stdv)
|
50 |
+
if self.bias is not None:
|
51 |
+
self.bias.data.zero_()
|
52 |
+
|
53 |
+
def init_offset(self):
|
54 |
+
self.conv_offset_mask.weight.data.zero_()
|
55 |
+
self.conv_offset_mask.bias.data.zero_()
|
56 |
+
|
57 |
+
def forward(self, input_feat, inter, fea):
|
58 |
+
b, c, h, w = input_feat.shape
|
59 |
+
fea = self.c1(fea).unsqueeze(1)
|
60 |
+
weight = self.weight.unsqueeze(0) * fea
|
61 |
+
weight = weight.view(b * self.out_channels, self.in_channels, self.kernel_size[0],
|
62 |
+
self.kernel_size[1]).contiguous()
|
63 |
+
input_feat = input_feat.view(1, b * self.in_channels, h, w)
|
64 |
+
|
65 |
+
out = self.conv_offset_mask(inter)
|
66 |
+
o1, o2, mask = torch.chunk(out, 3, dim=1)
|
67 |
+
offset = torch.cat((o1, o2), dim=1)
|
68 |
+
mask = torch.sigmoid(mask)
|
69 |
+
|
70 |
+
out = modulated_deform_conv2d(input_feat.contiguous(), offset, mask, weight, self.bias, self.stride,
|
71 |
+
self.padding, self.dilation, b, b)
|
72 |
+
_, _, height, width = out.shape
|
73 |
+
out = out.view(b, self.out_channels, height, width).contiguous()
|
74 |
+
out2 = self.c2(out)
|
75 |
+
return out2
|
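The forward pass above folds the batch into the channel dimension (`view(1, b * in_channels, h, w)`) and stacks one modulated kernel per sample, so a single grouped deformable convolution applies a different, degradation-conditioned kernel to every batch element. A minimal sketch of that grouped per-sample trick, shown with a plain `F.conv2d` rather than mmcv's deformable op (shapes are illustrative):

```python
# Sketch of the per-sample grouped-conv trick used in DCN_layer_rgb.forward.
import torch
import torch.nn.functional as F

b, cin, cout, k, h, w = 2, 4, 4, 3, 16, 16
x = torch.randn(b, cin, h, w)
per_sample_weight = torch.randn(b, cout, cin, k, k)       # one kernel per sample

x_flat = x.view(1, b * cin, h, w)                         # fold batch into channels
w_flat = per_sample_weight.view(b * cout, cin, k, k)      # stack the kernels
out = F.conv2d(x_flat, w_flat, padding=k // 2, groups=b)  # groups=b -> per-sample conv
out = out.view(b, cout, h, w)
```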
net/dornet.py
ADDED
@@ -0,0 +1,586 @@
1 |
+
import torch
|
2 |
+
import torch.nn as nn
|
3 |
+
from .deform_conv import DCN_layer_rgb
|
4 |
+
import torch.nn.functional as F
|
5 |
+
import math
|
6 |
+
|
7 |
+
from torch.distributions.normal import Normal
|
8 |
+
import numpy as np
|
9 |
+
|
10 |
+
|
11 |
+
class SparseDispatcher(object):
|
12 |
+
"""Helper for implementing a mixture of experts.
|
13 |
+
The purpose of this class is to create input minibatches for the
|
14 |
+
experts and to combine the results of the experts to form a unified
|
15 |
+
output tensor.
|
16 |
+
There are two functions:
|
17 |
+
dispatch - take an input Tensor and create input Tensors for each expert.
|
18 |
+
combine - take output Tensors from each expert and form a combined output
|
19 |
+
Tensor. Outputs from different experts for the same batch element are
|
20 |
+
summed together, weighted by the provided "gates".
|
21 |
+
The class is initialized with a "gates" Tensor, which specifies which
|
22 |
+
batch elements go to which experts, and the weights to use when combining
|
23 |
+
the outputs. Batch element b is sent to expert e iff gates[b, e] != 0.
|
24 |
+
The inputs and outputs are all two-dimensional [batch, depth].
|
25 |
+
Caller is responsible for collapsing additional dimensions prior to
|
26 |
+
calling this class and reshaping the output to the original shape.
|
27 |
+
See common_layers.reshape_like().
|
28 |
+
Example use:
|
29 |
+
gates: a float32 `Tensor` with shape `[batch_size, num_experts]`
|
30 |
+
inputs: a float32 `Tensor` with shape `[batch_size, input_size]`
|
31 |
+
experts: a list of length `num_experts` containing sub-networks.
|
32 |
+
dispatcher = SparseDispatcher(num_experts, gates)
|
33 |
+
expert_inputs = dispatcher.dispatch(inputs)
|
34 |
+
expert_outputs = [experts[i](expert_inputs[i]) for i in range(num_experts)]
|
35 |
+
outputs = dispatcher.combine(expert_outputs)
|
36 |
+
The preceding code sets the output for a particular example b to:
|
37 |
+
output[b] = Sum_i(gates[b, i] * experts[i](inputs[b]))
|
38 |
+
This class takes advantage of sparsity in the gate matrix by including in the
|
39 |
+
`Tensor`s for expert i only the batch elements for which `gates[b, i] > 0`.
|
40 |
+
"""
|
41 |
+
|
42 |
+
def __init__(self, num_experts, gates):
|
43 |
+
"""Create a SparseDispatcher."""
|
44 |
+
|
45 |
+
self._gates = gates
|
46 |
+
self._num_experts = num_experts
|
47 |
+
# sort experts
|
48 |
+
sorted_experts, index_sorted_experts = torch.nonzero(gates).sort(0)
|
49 |
+
# drop indices
|
50 |
+
_, self._expert_index = sorted_experts.split(1, dim=1)
|
51 |
+
# get according batch index for each expert
|
52 |
+
self._batch_index = torch.nonzero(gates)[index_sorted_experts[:, 1], 0]
|
53 |
+
# calculate num samples that each expert gets
|
54 |
+
self._part_sizes = (gates > 0).sum(0).tolist()
|
55 |
+
# expand gates to match with self._batch_index
|
56 |
+
gates_exp = gates[self._batch_index.flatten()]
|
57 |
+
self._nonzero_gates = torch.gather(gates_exp, 1, self._expert_index)
|
58 |
+
|
59 |
+
def dispatch(self, D_Kernel, index_1):
|
60 |
+
b, c = D_Kernel.shape
|
61 |
+
|
62 |
+
D_Kernel_exp = D_Kernel[self._batch_index]
|
63 |
+
|
64 |
+
list1 = torch.zeros((1, self._num_experts))
|
65 |
+
list1[0, index_1] = b
|
66 |
+
|
67 |
+
return torch.split(D_Kernel_exp, list1[0].int().tolist(), dim=0)
|
68 |
+
|
69 |
+
def combine(self, expert_out, multiply_by_gates=True):
|
70 |
+
stitched = torch.cat(expert_out, 0).exp()
|
71 |
+
if multiply_by_gates:
|
72 |
+
stitched = stitched.mul(self._nonzero_gates.unsqueeze(1).unsqueeze(1))
|
73 |
+
|
74 |
+
zeros = torch.zeros(
|
75 |
+
(self._gates.size(0), expert_out[-1].size(1), expert_out[-1].size(2), expert_out[-1].size(3)),
|
76 |
+
requires_grad=True, device=stitched.device)
|
77 |
+
|
78 |
+
combined = zeros.index_add(0, self._batch_index, stitched.float())
|
79 |
+
|
80 |
+
# add eps to all zero values in order to avoid nans when going back to log space
|
81 |
+
combined[combined == 0] = np.finfo(float).eps
|
82 |
+
# back to log space
|
83 |
+
return combined.log()
|
84 |
+
|
85 |
+
def expert_to_gates(self):
|
86 |
+
"""Gate values corresponding to the examples in the per-expert `Tensor`s.
|
87 |
+
Returns:
|
88 |
+
a list of `num_experts` one-dimensional `Tensor`s with type `tf.float32`
|
89 |
+
and shapes `[expert_batch_size_i]`
|
90 |
+
"""
|
91 |
+
# split nonzero gates for each expert
|
92 |
+
return torch.split(self._nonzero_gates, self._part_sizes, dim=0)
|
93 |
+
|
94 |
+
|
95 |
+
class DecMoE(nn.Module):
|
96 |
+
"""Call a Sparsely gated mixture of experts layer with 1-layer Feed-Forward networks as experts.
|
97 |
+
Args:
|
98 |
+
input_size: integer - size of the input
|
99 |
+
output_size: integer - size of the output
|
100 |
+
num_experts: an integer - number of experts
|
101 |
+
hidden_size: an integer - hidden size of the experts
|
102 |
+
noisy_gating: a boolean
|
103 |
+
k: an integer - how many experts to use for each batch element
|
104 |
+
"""
|
105 |
+
|
106 |
+
def __init__(self, ds_inputsize, input_size, output_size, num_experts, hidden_size, noisy_gating=True, k=2,
|
107 |
+
trainingmode=True):
|
108 |
+
super(DecMoE, self).__init__()
|
109 |
+
self.noisy_gating = noisy_gating
|
110 |
+
self.num_experts = num_experts
|
111 |
+
self.output_size = output_size
|
112 |
+
self.input_size = input_size
|
113 |
+
self.hidden_size = hidden_size
|
114 |
+
self.training = trainingmode
|
115 |
+
self.k = k
|
116 |
+
# instantiate experts
|
117 |
+
self.experts = nn.ModuleList(
|
118 |
+
[generateKernel(hidden_size, 3), generateKernel(hidden_size, 5), generateKernel(hidden_size, 7),
|
119 |
+
generateKernel(hidden_size, 9)])
|
120 |
+
self.w_gate = nn.Parameter(torch.zeros(ds_inputsize, num_experts), requires_grad=True)
|
121 |
+
self.w_noise = nn.Parameter(torch.zeros(ds_inputsize, num_experts), requires_grad=True)
|
122 |
+
|
123 |
+
self.softplus = nn.Softplus()
|
124 |
+
self.softmax = nn.Softmax(1)
|
125 |
+
self.register_buffer("mean", torch.tensor([0.0]))
|
126 |
+
self.register_buffer("std", torch.tensor([1.0]))
|
127 |
+
assert (self.k <= self.num_experts)
|
128 |
+
|
129 |
+
def cv_squared(self, x):
|
130 |
+
"""The squared coefficient of variation of a sample.
|
131 |
+
Useful as a loss to encourage a positive distribution to be more uniform.
|
132 |
+
Epsilons added for numerical stability.
|
133 |
+
Returns 0 for an empty Tensor.
|
134 |
+
Args:
|
135 |
+
x: a `Tensor`.
|
136 |
+
Returns:
|
137 |
+
a `Scalar`.
|
138 |
+
"""
|
139 |
+
eps = 1e-10
|
140 |
+
# if only num_experts = 1
|
141 |
+
|
142 |
+
if x.shape[0] == 1:
|
143 |
+
return torch.tensor([0], device=x.device, dtype=x.dtype)
|
144 |
+
return x.float().var() / (x.float().mean() ** 2 + eps)
|
145 |
+
|
146 |
+
def _gates_to_load(self, gates):
|
147 |
+
"""Compute the true load per expert, given the gates.
|
148 |
+
The load is the number of examples for which the corresponding gate is >0.
|
149 |
+
Args:
|
150 |
+
gates: a `Tensor` of shape [batch_size, n]
|
151 |
+
Returns:
|
152 |
+
a float32 `Tensor` of shape [n]
|
153 |
+
"""
|
154 |
+
return (gates > 0).sum(0)
|
155 |
+
|
156 |
+
def _prob_in_top_k(self, clean_values, noisy_values, noise_stddev, noisy_top_values):
|
157 |
+
"""Helper function to NoisyTopKGating.
|
158 |
+
Computes the probability that value is in top k, given different random noise.
|
159 |
+
This gives us a way of backpropagating from a loss that balances the number
|
160 |
+
of times each expert is in the top k experts per example.
|
161 |
+
In the case of no noise, pass in None for noise_stddev, and the result will
|
162 |
+
not be differentiable.
|
163 |
+
Args:
|
164 |
+
clean_values: a `Tensor` of shape [batch, n].
|
165 |
+
noisy_values: a `Tensor` of shape [batch, n]. Equal to clean values plus
|
166 |
+
normally distributed noise with standard deviation noise_stddev.
|
167 |
+
noise_stddev: a `Tensor` of shape [batch, n], or None
|
168 |
+
noisy_top_values: a `Tensor` of shape [batch, m].
|
169 |
+
"values" Output of tf.top_k(noisy_top_values, m). m >= k+1
|
170 |
+
Returns:
|
171 |
+
a `Tensor` of shape [batch, n].
|
172 |
+
"""
|
173 |
+
batch = clean_values.size(0)
|
174 |
+
m = noisy_top_values.size(1)
|
175 |
+
top_values_flat = noisy_top_values.flatten()
|
176 |
+
|
177 |
+
threshold_positions_if_in = torch.arange(batch, device=clean_values.device) * m + self.k
|
178 |
+
threshold_if_in = torch.unsqueeze(torch.gather(top_values_flat, 0, threshold_positions_if_in), 1)
|
179 |
+
is_in = torch.gt(noisy_values, threshold_if_in)
|
180 |
+
threshold_positions_if_out = threshold_positions_if_in - 1
|
181 |
+
threshold_if_out = torch.unsqueeze(torch.gather(top_values_flat, 0, threshold_positions_if_out), 1)
|
182 |
+
# is each value currently in the top k.
|
183 |
+
normal = Normal(self.mean, self.std)
|
184 |
+
prob_if_in = normal.cdf((clean_values - threshold_if_in) / noise_stddev)
|
185 |
+
prob_if_out = normal.cdf((clean_values - threshold_if_out) / noise_stddev)
|
186 |
+
prob = torch.where(is_in, prob_if_in, prob_if_out)
|
187 |
+
return prob
|
188 |
+
|
189 |
+
def noisy_top_k_gating(self, x, train, noise_epsilon=1e-2):
|
190 |
+
"""Noisy top-k gating.
|
191 |
+
See paper: https://arxiv.org/abs/1701.06538.
|
192 |
+
Args:
|
193 |
+
x: input Tensor with shape [batch_size, input_size]
|
194 |
+
train: a boolean - we only add noise at training time.
|
195 |
+
noise_epsilon: a float
|
196 |
+
Returns:
|
197 |
+
gates: a Tensor with shape [batch_size, num_experts]
|
198 |
+
load: a Tensor with shape [num_experts]
|
199 |
+
"""
|
200 |
+
clean_logits = x @ self.w_gate
|
201 |
+
if self.noisy_gating and train:
|
202 |
+
raw_noise_stddev = x @ self.w_noise
|
203 |
+
noise_stddev = ((self.softplus(raw_noise_stddev) + noise_epsilon))
|
204 |
+
noisy_logits = clean_logits + (torch.randn_like(clean_logits) * noise_stddev)
|
205 |
+
logits = noisy_logits
|
206 |
+
else:
|
207 |
+
logits = clean_logits
|
208 |
+
|
209 |
+
# calculate topk + 1 that will be needed for the noisy gates
|
210 |
+
top_logits, top_indices = logits.topk(min(self.k + 1, self.num_experts), dim=1)
|
211 |
+
top_k_logits = top_logits[:, :self.k]
|
212 |
+
top_k_indices = top_indices[:, :self.k]
|
213 |
+
top_k_gates = self.softmax(top_k_logits)
|
214 |
+
|
215 |
+
zeros = torch.zeros_like(logits, requires_grad=True)
|
216 |
+
gates = zeros.scatter(1, top_k_indices, top_k_gates)
|
217 |
+
|
218 |
+
if self.noisy_gating and self.k < self.num_experts and train:
|
219 |
+
load = (self._prob_in_top_k(clean_logits, noisy_logits, noise_stddev, top_logits)).sum(0)
|
220 |
+
else:
|
221 |
+
load = self._gates_to_load(gates)
|
222 |
+
return gates, load, top_k_indices[0]
|
223 |
+
|
224 |
+
def forward(self, x_ds, D_Kernel, loss_coef=1e-2):
|
225 |
+
gates, load, index_1 = self.noisy_top_k_gating(x_ds, self.training)
|
226 |
+
# calculate importance loss
|
227 |
+
importance = gates.sum(0)
|
228 |
+
|
229 |
+
loss = self.cv_squared(importance) + self.cv_squared(load)
|
230 |
+
loss *= loss_coef
|
231 |
+
|
232 |
+
dispatcher = SparseDispatcher(self.num_experts, gates)
|
233 |
+
expert_kernel = dispatcher.dispatch(D_Kernel, index_1)
|
234 |
+
expert_outputs = [self.experts[i](expert_kernel[i]) for i in range(self.num_experts)]
|
235 |
+
return expert_outputs, loss
|
236 |
+
|
237 |
+
|
238 |
+
def default_conv(in_channels, out_channels, kernel_size, bias=True):
|
239 |
+
return nn.Conv2d(in_channels, out_channels, kernel_size, padding=(kernel_size // 2), bias=bias)
|
240 |
+
|
241 |
+
|
242 |
+
class CALayer(nn.Module):
|
243 |
+
def __init__(self, channel, reduction=16):
|
244 |
+
super(CALayer, self).__init__()
|
245 |
+
self.avg_pool = nn.AdaptiveAvgPool2d(1)
|
246 |
+
self.conv_du = nn.Sequential(
|
247 |
+
nn.Conv2d(channel, channel // reduction, 1, padding=0, bias=True),
|
248 |
+
nn.ReLU(inplace=True),
|
249 |
+
nn.Conv2d(channel // reduction, channel, 1, padding=0, bias=True),
|
250 |
+
nn.Sigmoid()
|
251 |
+
)
|
252 |
+
|
253 |
+
def forward(self, x):
|
254 |
+
y = self.avg_pool(x)
|
255 |
+
y = self.conv_du(y)
|
256 |
+
return x * y
|
257 |
+
|
258 |
+
|
259 |
+
class RCAB(nn.Module):
|
260 |
+
def __init__(
|
261 |
+
self, conv, n_feat, kernel_size, reduction,
|
262 |
+
bias=True, bn=False, act=nn.ReLU(True), res_scale=1):
|
263 |
+
|
264 |
+
super(RCAB, self).__init__()
|
265 |
+
modules_body = []
|
266 |
+
for i in range(2):
|
267 |
+
modules_body.append(conv(n_feat, n_feat, kernel_size, bias=bias))
|
268 |
+
if bn: modules_body.append(nn.BatchNorm2d(n_feat))
|
269 |
+
if i == 0: modules_body.append(act)
|
270 |
+
modules_body.append(CALayer(n_feat, reduction))
|
271 |
+
self.body = nn.Sequential(*modules_body)
|
272 |
+
self.res_scale = res_scale
|
273 |
+
|
274 |
+
def forward(self, x):
|
275 |
+
res = self.body(x)
|
276 |
+
res += x
|
277 |
+
return res
|
278 |
+
|
279 |
+
|
280 |
+
class ResidualGroup(nn.Module):
|
281 |
+
def __init__(self, conv, n_feat, kernel_size, reduction, n_resblocks):
|
282 |
+
super(ResidualGroup, self).__init__()
|
283 |
+
modules_body = []
|
284 |
+
modules_body = [
|
285 |
+
RCAB(
|
286 |
+
conv, n_feat, kernel_size, reduction, bias=True, bn=False,
|
287 |
+
act=nn.LeakyReLU(negative_slope=0.2, inplace=True), res_scale=1) \
|
288 |
+
for _ in range(n_resblocks)]
|
289 |
+
modules_body.append(conv(n_feat, n_feat, kernel_size))
|
290 |
+
self.body = nn.Sequential(*modules_body)
|
291 |
+
|
292 |
+
def forward(self, x):
|
293 |
+
res = self.body(x)
|
294 |
+
res += x
|
295 |
+
return res
|
296 |
+
|
297 |
+
|
298 |
+
class ResBlock(nn.Module):
|
299 |
+
def __init__(self, in_feat, out_feat, stride=1):
|
300 |
+
super(ResBlock, self).__init__()
|
301 |
+
self.backbone = nn.Sequential(
|
302 |
+
nn.Conv2d(in_feat, out_feat, kernel_size=3, stride=stride, padding=1, bias=False),
|
303 |
+
nn.BatchNorm2d(out_feat),
|
304 |
+
nn.LeakyReLU(0.1, True),
|
305 |
+
nn.Conv2d(out_feat, out_feat, kernel_size=3, padding=1, bias=False),
|
306 |
+
nn.BatchNorm2d(out_feat),
|
307 |
+
)
|
308 |
+
self.shortcut = nn.Sequential(
|
309 |
+
nn.Conv2d(in_feat, out_feat, kernel_size=1, stride=stride, bias=False),
|
310 |
+
nn.BatchNorm2d(out_feat)
|
311 |
+
)
|
312 |
+
|
313 |
+
def forward(self, x):
|
314 |
+
return nn.LeakyReLU(0.1, True)(self.backbone(x) + self.shortcut(x))
|
315 |
+
|
316 |
+
|
317 |
+
class DaEncoder(nn.Module):
|
318 |
+
def __init__(self, nfeats):
|
319 |
+
super(DaEncoder, self).__init__()
|
320 |
+
|
321 |
+
self.E_pre = nn.Sequential(
|
322 |
+
ResBlock(in_feat=1, out_feat=nfeats // 2, stride=1),
|
323 |
+
ResBlock(in_feat=nfeats // 2, out_feat=nfeats, stride=1),
|
324 |
+
ResBlock(in_feat=nfeats, out_feat=nfeats, stride=1)
|
325 |
+
)
|
326 |
+
self.E = nn.Sequential(
|
327 |
+
nn.Conv2d(nfeats, nfeats * 2, kernel_size=3, stride=2, padding=1),
|
328 |
+
nn.BatchNorm2d(nfeats * 2),
|
329 |
+
nn.LeakyReLU(0.1, True),
|
330 |
+
nn.Conv2d(nfeats * 2, nfeats * 4, kernel_size=3, stride=2, padding=1),
|
331 |
+
nn.BatchNorm2d(nfeats * 4),
|
332 |
+
nn.AdaptiveAvgPool2d(1)
|
333 |
+
)
|
334 |
+
|
335 |
+
def forward(self, x):
|
336 |
+
inter = self.E_pre(x)
|
337 |
+
fea = self.E(inter)
|
338 |
+
|
339 |
+
out = fea.squeeze(-1).squeeze(-1)
|
340 |
+
|
341 |
+
return fea, out, inter
|
342 |
+
|
343 |
+
|
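DaEncoder returns three tensors that the rest of the network consumes: the pooled degradation feature `fea`, its flattened copy, which is later handed to the kernel-generating experts as the degradation code, and the pre-pooling feature map `inter` reused by the fusion blocks. A quick shape check under the full-size setting (nfeats=64); importing and running the class standalone like this is only for illustration:

```python
import torch
from net.dornet import DaEncoder

enc = DaEncoder(nfeats=64)
depth = torch.rand(2, 1, 128, 128)   # low-quality depth maps in [0, 1]
fea, code, inter = enc(depth)

print(fea.shape)    # torch.Size([2, 256, 1, 1])    pooled degradation feature
print(code.shape)   # torch.Size([2, 256])           flattened code for the kernel experts
print(inter.shape)  # torch.Size([2, 64, 128, 128])  spatial features reused by the fusion blocks
```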
344 |
+
class generateKernel(nn.Module):
|
345 |
+
def __init__(self, nfeats, kernel_size=5):
|
346 |
+
super(generateKernel, self).__init__()
|
347 |
+
|
348 |
+
self.mlp = nn.Sequential(
|
349 |
+
nn.Linear(nfeats * 4, nfeats),
|
350 |
+
nn.LeakyReLU(0.1, True),
|
351 |
+
nn.Linear(nfeats, kernel_size * kernel_size)
|
352 |
+
)
|
353 |
+
|
354 |
+
def forward(self, D_Kernel):
|
355 |
+
D_Kernel = self.mlp(D_Kernel)
|
356 |
+
return D_Kernel
|
357 |
+
|
358 |
+
|
359 |
+
class DAB(nn.Module):
|
360 |
+
def __init__(self):
|
361 |
+
super(DAB, self).__init__()
|
362 |
+
self.relu = nn.LeakyReLU(0.1, True)
|
363 |
+
self.conv = default_conv(1, 1, 1)
|
364 |
+
|
365 |
+
def forward(self, x, D_Kernel):
|
366 |
+
b, c, h, w = x.size()
|
367 |
+
b1, l = D_Kernel.shape
|
368 |
+
kernel_size = int(math.sqrt(l))
|
369 |
+
with torch.no_grad():
|
370 |
+
kernel = D_Kernel.view(-1, 1, kernel_size, kernel_size)
|
371 |
+
out = F.conv2d(x.view(1, -1, h, w), kernel, groups=b * c, padding=(kernel_size - 1) // 2)
|
372 |
+
out = out.view(b, -1, h, w)
|
373 |
+
out = self.conv(self.relu(out).view(b, -1, h, w))
|
374 |
+
return out
|
375 |
+
|
376 |
+
|
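DAB applies one predicted blur kernel per sample to the restored depth by folding the batch into the channel dimension and convolving with `groups=b`. The same trick in isolation (shapes and the kernel normalization here are illustrative, not the repo's defaults):

```python
import torch
import torch.nn.functional as F

b, h, w, ksize = 4, 32, 32, 5
depth = torch.rand(b, 1, h, w)                        # restored single-channel depth maps
kernels = torch.rand(b, ksize * ksize)                # one flattened kernel per sample
kernels = kernels / kernels.sum(dim=1, keepdim=True)  # normalize each kernel (assumption)

weight = kernels.view(b, 1, ksize, ksize)             # one filter per group
out = F.conv2d(depth.view(1, b, h, w), weight, groups=b, padding=(ksize - 1) // 2)
out = out.view(b, 1, h, w)
print(out.shape)  # torch.Size([4, 1, 32, 32])
```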
377 |
+
class DR(nn.Module):
|
378 |
+
def __init__(self, nfeats, num_experts=4, k=3):
|
379 |
+
super(DR, self).__init__()
|
380 |
+
|
381 |
+
self.topK = k
|
382 |
+
self.num_experts = num_experts
|
383 |
+
self.start_idx = num_experts - k
|
384 |
+
|
385 |
+
self.c1 = ResBlock(in_feat=1, out_feat=nfeats, stride=1)
|
386 |
+
self.gap = nn.AdaptiveMaxPool2d(1)
|
387 |
+
self.gap2 = nn.AdaptiveAvgPool2d(1)
|
388 |
+
self.fc1 = nn.Linear(nfeats, nfeats * 4)
|
389 |
+
|
390 |
+
self.dab = [DAB(), DAB(), DAB()]
|
391 |
+
self.dab_list = nn.ModuleList(self.dab)
|
392 |
+
|
393 |
+
self.DecoderMoE = DecMoE(ds_inputsize=nfeats * 4, input_size=1, output_size=1, num_experts=num_experts,
|
394 |
+
hidden_size=nfeats,
|
395 |
+
noisy_gating=True, k=k, trainingmode=True)
|
396 |
+
|
397 |
+
self.conv = default_conv(1, 1, 1)
|
398 |
+
|
399 |
+
def forward(self, lr, sr, D_Kernel):
|
400 |
+
|
401 |
+
y1 = F.interpolate(lr, scale_factor=0.125, mode='bicubic', align_corners=True,
|
402 |
+
recompute_scale_factor=True)
|
403 |
+
y2 = self.c1(y1)
|
404 |
+
y3 = self.gap(y2) + self.gap2(y2)
|
405 |
+
y4 = y3.view(y3.shape[0], -1)
|
406 |
+
y5 = self.fc1(y4)
|
407 |
+
|
408 |
+
D_Kernel_list, aux_loss = self.DecoderMoE(y5, D_Kernel, loss_coef=0.02)
|
409 |
+
|
410 |
+
sorted_D_Kernel_list = sorted(D_Kernel_list, key=lambda x: (x.size(0), x.size(1)))
|
411 |
+
|
412 |
+
sum_result = None
|
413 |
+
for iidx in range(self.start_idx, self.num_experts):
|
414 |
+
res_d = self.dab_list[iidx - self.start_idx](sr, sorted_D_Kernel_list[iidx])
|
415 |
+
if sum_result is None:
|
416 |
+
sum_result = res_d
|
417 |
+
else:
|
418 |
+
sum_result += res_d
|
419 |
+
|
420 |
+
out = self.conv(sum_result)
|
421 |
+
return out, aux_loss
|
422 |
+
|
423 |
+
|
424 |
+
class DA_rgb(nn.Module):
|
425 |
+
def __init__(self, channels_in, channels_out, kernel_size, reduction):
|
426 |
+
super(DA_rgb, self).__init__()
|
427 |
+
|
428 |
+
self.kernel_size = kernel_size
|
429 |
+
self.channels_out = channels_out
|
430 |
+
self.channels_in = channels_in
|
431 |
+
|
432 |
+
self.dcnrgb = DCN_layer_rgb(self.channels_in, self.channels_out, kernel_size,
|
433 |
+
padding=(kernel_size - 1) // 2, bias=False)
|
434 |
+
|
435 |
+
self.rcab1 = RCAB(default_conv, channels_out, 3, reduction)
|
436 |
+
self.relu = nn.LeakyReLU(0.1, True)
|
437 |
+
self.conv = default_conv(channels_in, channels_out, 3)
|
438 |
+
|
439 |
+
def forward(self, x, inter, fea):
|
440 |
+
out1 = self.rcab1(x)
|
441 |
+
out2 = self.dcnrgb(out1, inter, fea)
|
442 |
+
out = self.conv(out2 + out1)
|
443 |
+
return out
|
444 |
+
|
445 |
+
|
446 |
+
class FusionBlock(nn.Module):
|
447 |
+
def __init__(self, channels_in, channels_out):
|
448 |
+
super(FusionBlock, self).__init__()
|
449 |
+
self.conv1 = default_conv(channels_in, channels_in // 4, 1)
|
450 |
+
self.conv2 = default_conv(channels_in, channels_in // 4, 1)
|
451 |
+
self.conv3 = default_conv(channels_in // 4, channels_in, 1)
|
452 |
+
self.sigmoid = nn.Sigmoid()
|
453 |
+
|
454 |
+
self.conv = default_conv(2 * channels_in, channels_out, 3)
|
455 |
+
|
456 |
+
def forward(self, rgb, dep, inter):
|
457 |
+
inter1 = self.conv1(inter)
|
458 |
+
rgb1 = self.conv2(rgb)
|
459 |
+
|
460 |
+
w = torch.sigmoid(inter1)
|
461 |
+
rgb2 = rgb1 * w
|
462 |
+
rgb3 = self.conv3(rgb2) + rgb
|
463 |
+
cat1 = torch.cat([rgb3, dep], dim=1)
|
464 |
+
out = self.conv(cat1)
|
465 |
+
|
466 |
+
return out
|
467 |
+
|
468 |
+
|
469 |
+
class DOFT(nn.Module):
|
470 |
+
def __init__(self, channels_in, channels_out, kernel_size, reduction):
|
471 |
+
super(DOFT, self).__init__()
|
472 |
+
self.channels_out = channels_out
|
473 |
+
self.channels_in = channels_in
|
474 |
+
self.kernel_size = kernel_size
|
475 |
+
|
476 |
+
self.DA_rgb = DA_rgb(channels_in, channels_out, kernel_size, reduction)
|
477 |
+
self.fb = FusionBlock(channels_in, channels_out)
|
478 |
+
|
479 |
+
self.relu = nn.LeakyReLU(0.1, True)
|
480 |
+
|
481 |
+
def forward(self, x, inter, rgb, fea):
|
482 |
+
rgb = self.DA_rgb(rgb, inter, fea)
|
483 |
+
|
484 |
+
out1 = self.fb(rgb, x, inter)
|
485 |
+
out = x + out1
|
486 |
+
return out
|
487 |
+
|
488 |
+
|
489 |
+
class DSRN(nn.Module):
|
490 |
+
def __init__(self, nfeats=64, reduction=16, conv=default_conv):
|
491 |
+
super(DSRN, self).__init__()
|
492 |
+
|
493 |
+
kernel_size = 3
|
494 |
+
|
495 |
+
n_feats = nfeats
|
496 |
+
|
497 |
+
# head module
|
498 |
+
modules_head = [conv(1, n_feats, kernel_size)]
|
499 |
+
self.head = nn.Sequential(*modules_head)
|
500 |
+
|
501 |
+
modules_head_rgb = [conv(3, n_feats, kernel_size)]
|
502 |
+
self.head_rgb = nn.Sequential(*modules_head_rgb)
|
503 |
+
|
504 |
+
self.dgm1 = DOFT(n_feats, n_feats, 3, reduction)
|
505 |
+
self.dgm2 = DOFT(n_feats, n_feats, 3, reduction)
|
506 |
+
self.dgm3 = DOFT(n_feats, n_feats, 3, reduction)
|
507 |
+
self.dgm4 = DOFT(n_feats, n_feats, 3, reduction)
|
508 |
+
self.dgm5 = DOFT(n_feats, n_feats, 3, reduction)
|
509 |
+
|
510 |
+
self.c_d1 = ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)
|
511 |
+
self.c_d2 = ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)
|
512 |
+
self.c_d3 = ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)
|
513 |
+
self.c_d4 = ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)
|
514 |
+
|
515 |
+
modules_d5 = [conv(5 * n_feats, n_feats, 1),
|
516 |
+
ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)]
|
517 |
+
self.c_d5 = nn.Sequential(*modules_d5)
|
518 |
+
|
519 |
+
self.c_r1 = conv(n_feats, n_feats, kernel_size)
|
520 |
+
self.c_r2 = conv(n_feats, n_feats, kernel_size)
|
521 |
+
self.c_r3 = conv(n_feats, n_feats, kernel_size)
|
522 |
+
self.c_r4 = conv(n_feats, n_feats, kernel_size)
|
523 |
+
|
524 |
+
self.act = nn.LeakyReLU(0.1, True)
|
525 |
+
|
526 |
+
# tail
|
527 |
+
modules_tail = [conv(n_feats, 1, kernel_size)]
|
528 |
+
self.tail = nn.Sequential(*modules_tail)
|
529 |
+
|
530 |
+
def forward(self, x, inter, rgb, fea):
|
531 |
+
# head
|
532 |
+
x = self.head(x)
|
533 |
+
rgb = self.head_rgb(rgb)
|
534 |
+
rgb1 = self.c_r1(rgb)
|
535 |
+
rgb2 = self.c_r2(self.act(rgb1))
|
536 |
+
rgb3 = self.c_r3(self.act(rgb2))
|
537 |
+
rgb4 = self.c_r4(self.act(rgb3))
|
538 |
+
|
539 |
+
dep10 = self.dgm1(x, inter, rgb, fea)
|
540 |
+
dep1 = self.c_d1(dep10)
|
541 |
+
dep20 = self.dgm2(dep1, inter, rgb1, fea)
|
542 |
+
dep2 = self.c_d2(self.act(dep20))
|
543 |
+
dep30 = self.dgm3(dep2, inter, rgb2, fea)
|
544 |
+
dep3 = self.c_d3(self.act(dep30))
|
545 |
+
dep40 = self.dgm4(dep3, inter, rgb3, fea)
|
546 |
+
dep4 = self.c_d4(self.act(dep40))
|
547 |
+
dep50 = self.dgm5(dep4, inter, rgb4, fea)
|
548 |
+
|
549 |
+
cat1 = torch.cat([dep1, dep2, dep3, dep4, dep50], dim=1)
|
550 |
+
dep6 = self.c_d5(cat1)
|
551 |
+
|
552 |
+
res = dep6 + x
|
553 |
+
|
554 |
+
out = self.tail(res)
|
555 |
+
|
556 |
+
return out
|
557 |
+
|
558 |
+
|
559 |
+
class Net(nn.Module):
|
560 |
+
def __init__(self, tiny_model=False):
|
561 |
+
super(Net, self).__init__()
|
562 |
+
|
563 |
+
if tiny_model:
|
564 |
+
n_feats = 24
|
565 |
+
reduction = 4
|
566 |
+
else:
|
567 |
+
n_feats = 64
|
568 |
+
reduction = 16
|
569 |
+
|
570 |
+
# Restorer
|
571 |
+
self.R = DSRN(nfeats=n_feats, reduction=reduction)
|
572 |
+
self.training = False
|
573 |
+
# Encoder
|
574 |
+
self.Enc = DaEncoder(nfeats=n_feats)
|
575 |
+
self.Dab = DR(nfeats=n_feats)
|
576 |
+
|
577 |
+
def forward(self, x_query, rgb):
|
578 |
+
|
579 |
+
fea, d_kernel, inter = self.Enc(x_query)
|
580 |
+
restored = self.R(x_query, inter, rgb, fea)
|
581 |
+
|
582 |
+
if self.training:
|
583 |
+
d_lr_, aux_loss = self.Dab(x_query, restored, d_kernel)
|
584 |
+
return restored, d_lr_, aux_loss
|
585 |
+
else:
|
586 |
+
return restored
|
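One detail worth noting about Net: its forward returns three values in training mode (restored depth, re-degraded depth, MoE auxiliary loss) but only the restored depth in eval mode, which is why the test scripts unpack a single output. A small sketch with random tensors:

```python
import torch
from net.dornet import Net

net = Net(tiny_model=True).cuda()
lr = torch.rand(1, 1, 64, 64).cuda()    # low-quality depth in [0, 1]
rgb = torch.rand(1, 3, 64, 64).cuda()   # aligned RGB guidance in [0, 1]

net.train()                                            # training: degradation branch is active
restored, d_lr_, aux_loss = net(x_query=lr, rgb=rgb)

net.eval()                                             # evaluation: only the restored depth
with torch.no_grad():
    restored = net(x_query=lr, rgb=rgb)
print(restored.shape)  # torch.Size([1, 1, 64, 64])
```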
net/dornet_ddp.py
ADDED
@@ -0,0 +1,600 @@
1 |
+
import torch
|
2 |
+
import torch.nn as nn
|
3 |
+
from .deform_conv import DCN_layer_rgb
|
4 |
+
import torch.nn.functional as F
|
5 |
+
import math
|
6 |
+
from net.CR import *
|
7 |
+
from torch.distributions.normal import Normal
|
8 |
+
import numpy as np
|
9 |
+
|
10 |
+
|
11 |
+
class SparseDispatcher(object):
|
12 |
+
"""Helper for implementing a mixture of experts.
|
13 |
+
The purpose of this class is to create input minibatches for the
|
14 |
+
experts and to combine the results of the experts to form a unified
|
15 |
+
output tensor.
|
16 |
+
There are two functions:
|
17 |
+
dispatch - take an input Tensor and create input Tensors for each expert.
|
18 |
+
combine - take output Tensors from each expert and form a combined output
|
19 |
+
Tensor. Outputs from different experts for the same batch element are
|
20 |
+
summed together, weighted by the provided "gates".
|
21 |
+
The class is initialized with a "gates" Tensor, which specifies which
|
22 |
+
batch elements go to which experts, and the weights to use when combining
|
23 |
+
the outputs. Batch element b is sent to expert e iff gates[b, e] != 0.
|
24 |
+
The inputs and outputs are all two-dimensional [batch, depth].
|
25 |
+
Caller is responsible for collapsing additional dimensions prior to
|
26 |
+
calling this class and reshaping the output to the original shape.
|
27 |
+
See common_layers.reshape_like().
|
28 |
+
Example use:
|
29 |
+
gates: a float32 `Tensor` with shape `[batch_size, num_experts]`
|
30 |
+
inputs: a float32 `Tensor` with shape `[batch_size, input_size]`
|
31 |
+
experts: a list of length `num_experts` containing sub-networks.
|
32 |
+
dispatcher = SparseDispatcher(num_experts, gates)
|
33 |
+
expert_inputs = dispatcher.dispatch(inputs)
|
34 |
+
expert_outputs = [experts[i](expert_inputs[i]) for i in range(num_experts)]
|
35 |
+
outputs = dispatcher.combine(expert_outputs)
|
36 |
+
The preceding code sets the output for a particular example b to:
|
37 |
+
output[b] = Sum_i(gates[b, i] * experts[i](inputs[b]))
|
38 |
+
This class takes advantage of sparsity in the gate matrix by including in the
|
39 |
+
`Tensor`s for expert i only the batch elements for which `gates[b, i] > 0`.
|
40 |
+
"""
|
41 |
+
|
42 |
+
def __init__(self, num_experts, gates):
|
43 |
+
"""Create a SparseDispatcher."""
|
44 |
+
|
45 |
+
self._gates = gates
|
46 |
+
self._num_experts = num_experts
|
47 |
+
# sort experts
|
48 |
+
sorted_experts, index_sorted_experts = torch.nonzero(gates).sort(0)
|
49 |
+
# drop indices
|
50 |
+
_, self._expert_index = sorted_experts.split(1, dim=1)
|
51 |
+
# get according batch index for each expert
|
52 |
+
self._batch_index = torch.nonzero(gates)[index_sorted_experts[:, 1], 0]
|
53 |
+
# calculate num samples that each expert gets
|
54 |
+
self._part_sizes = (gates > 0).sum(0).tolist()
|
55 |
+
# expand gates to match with self._batch_index
|
56 |
+
gates_exp = gates[self._batch_index.flatten()]
|
57 |
+
self._nonzero_gates = torch.gather(gates_exp, 1, self._expert_index)
|
58 |
+
|
59 |
+
def dispatch(self, D_Kernel, index_1):
|
60 |
+
b, c = D_Kernel.shape
|
61 |
+
|
62 |
+
D_Kernel_exp = D_Kernel[self._batch_index]
|
63 |
+
|
64 |
+
list1 = torch.zeros((1, self._num_experts))
|
65 |
+
list1[0, index_1] = b
|
66 |
+
|
67 |
+
return torch.split(D_Kernel_exp, list1[0].int().tolist(), dim=0)
|
68 |
+
|
69 |
+
def combine(self, expert_out, multiply_by_gates=True):
|
70 |
+
stitched = torch.cat(expert_out, 0).exp()
|
71 |
+
if multiply_by_gates:
|
72 |
+
stitched = stitched.mul(self._nonzero_gates.unsqueeze(1).unsqueeze(1))
|
73 |
+
|
74 |
+
zeros = torch.zeros(
|
75 |
+
(self._gates.size(0), expert_out[-1].size(1), expert_out[-1].size(2), expert_out[-1].size(3)),
|
76 |
+
requires_grad=True, device=stitched.device)
|
77 |
+
|
78 |
+
combined = zeros.index_add(0, self._batch_index, stitched.float())
|
79 |
+
|
80 |
+
# add eps to all zero values in order to avoid nans when going back to log space
|
81 |
+
combined[combined == 0] = np.finfo(float).eps
|
82 |
+
# back to log space
|
83 |
+
return combined.log()
|
84 |
+
|
85 |
+
def expert_to_gates(self):
|
86 |
+
"""Gate values corresponding to the examples in the per-expert `Tensor`s.
|
87 |
+
Returns:
|
88 |
+
a list of `num_experts` one-dimensional `Tensor`s with type `tf.float32`
|
89 |
+
and shapes `[expert_batch_size_i]`
|
90 |
+
"""
|
91 |
+
# split nonzero gates for each expert
|
92 |
+
return torch.split(self._nonzero_gates, self._part_sizes, dim=0)
|
93 |
+
|
94 |
+
|
95 |
+
class DecMoE(nn.Module):
|
96 |
+
"""Call a Sparsely gated mixture of experts layer with 1-layer Feed-Forward networks as experts.
|
97 |
+
Args:
|
98 |
+
input_size: integer - size of the input
|
99 |
+
output_size: integer - size of the input
|
100 |
+
num_experts: an integer - number of experts
|
101 |
+
hidden_size: an integer - hidden size of the experts
|
102 |
+
noisy_gating: a boolean
|
103 |
+
k: an integer - how many experts to use for each batch element
|
104 |
+
"""
|
105 |
+
|
106 |
+
def __init__(self, ds_inputsize, input_size, output_size, num_experts, hidden_size, noisy_gating=True, k=2,
|
107 |
+
trainingmode=True):
|
108 |
+
super(DecMoE, self).__init__()
|
109 |
+
self.noisy_gating = noisy_gating
|
110 |
+
self.num_experts = num_experts
|
111 |
+
self.output_size = output_size
|
112 |
+
self.input_size = input_size
|
113 |
+
self.hidden_size = hidden_size
|
114 |
+
self.training = trainingmode
|
115 |
+
self.k = k
|
116 |
+
# instantiate experts
|
117 |
+
self.experts = nn.ModuleList(
|
118 |
+
[generateKernel(hidden_size, 3), generateKernel(hidden_size, 5), generateKernel(hidden_size, 7),
|
119 |
+
generateKernel(hidden_size, 9)])
|
120 |
+
self.w_gate = nn.Parameter(torch.zeros(ds_inputsize, num_experts), requires_grad=True)
|
121 |
+
self.w_noise = nn.Parameter(torch.zeros(ds_inputsize, num_experts), requires_grad=True)
|
122 |
+
|
123 |
+
self.softplus = nn.Softplus()
|
124 |
+
self.softmax = nn.Softmax(1)
|
125 |
+
self.register_buffer("mean", torch.tensor([0.0]))
|
126 |
+
self.register_buffer("std", torch.tensor([1.0]))
|
127 |
+
assert (self.k <= self.num_experts)
|
128 |
+
|
129 |
+
def cv_squared(self, x):
|
130 |
+
"""The squared coefficient of variation of a sample.
|
131 |
+
Useful as a loss to encourage a positive distribution to be more uniform.
|
132 |
+
Epsilons added for numerical stability.
|
133 |
+
Returns 0 for an empty Tensor.
|
134 |
+
Args:
|
135 |
+
x: a `Tensor`.
|
136 |
+
Returns:
|
137 |
+
a `Scalar`.
|
138 |
+
"""
|
139 |
+
eps = 1e-10
|
140 |
+
# if only num_experts = 1
|
141 |
+
|
142 |
+
if x.shape[0] == 1:
|
143 |
+
return torch.tensor([0], device=x.device, dtype=x.dtype)
|
144 |
+
return x.float().var() / (x.float().mean() ** 2 + eps)
|
145 |
+
|
146 |
+
def _gates_to_load(self, gates):
|
147 |
+
"""Compute the true load per expert, given the gates.
|
148 |
+
The load is the number of examples for which the corresponding gate is >0.
|
149 |
+
Args:
|
150 |
+
gates: a `Tensor` of shape [batch_size, n]
|
151 |
+
Returns:
|
152 |
+
a float32 `Tensor` of shape [n]
|
153 |
+
"""
|
154 |
+
return (gates > 0).sum(0)
|
155 |
+
|
156 |
+
def _prob_in_top_k(self, clean_values, noisy_values, noise_stddev, noisy_top_values):
|
157 |
+
"""Helper function to NoisyTopKGating.
|
158 |
+
Computes the probability that value is in top k, given different random noise.
|
159 |
+
This gives us a way of backpropagating from a loss that balances the number
|
160 |
+
of times each expert is in the top k experts per example.
|
161 |
+
In the case of no noise, pass in None for noise_stddev, and the result will
|
162 |
+
not be differentiable.
|
163 |
+
Args:
|
164 |
+
clean_values: a `Tensor` of shape [batch, n].
|
165 |
+
noisy_values: a `Tensor` of shape [batch, n]. Equal to clean values plus
|
166 |
+
normally distributed noise with standard deviation noise_stddev.
|
167 |
+
noise_stddev: a `Tensor` of shape [batch, n], or None
|
168 |
+
noisy_top_values: a `Tensor` of shape [batch, m].
|
169 |
+
"values" Output of tf.top_k(noisy_top_values, m). m >= k+1
|
170 |
+
Returns:
|
171 |
+
a `Tensor` of shape [batch, n].
|
172 |
+
"""
|
173 |
+
batch = clean_values.size(0)
|
174 |
+
m = noisy_top_values.size(1)
|
175 |
+
top_values_flat = noisy_top_values.flatten()
|
176 |
+
|
177 |
+
threshold_positions_if_in = torch.arange(batch, device=clean_values.device) * m + self.k
|
178 |
+
threshold_if_in = torch.unsqueeze(torch.gather(top_values_flat, 0, threshold_positions_if_in), 1)
|
179 |
+
is_in = torch.gt(noisy_values, threshold_if_in)
|
180 |
+
threshold_positions_if_out = threshold_positions_if_in - 1
|
181 |
+
threshold_if_out = torch.unsqueeze(torch.gather(top_values_flat, 0, threshold_positions_if_out), 1)
|
182 |
+
# is each value currently in the top k.
|
183 |
+
normal = Normal(self.mean, self.std)
|
184 |
+
prob_if_in = normal.cdf((clean_values - threshold_if_in) / noise_stddev)
|
185 |
+
prob_if_out = normal.cdf((clean_values - threshold_if_out) / noise_stddev)
|
186 |
+
prob = torch.where(is_in, prob_if_in, prob_if_out)
|
187 |
+
return prob
|
188 |
+
|
189 |
+
def noisy_top_k_gating(self, x, train, noise_epsilon=1e-2):
|
190 |
+
"""Noisy top-k gating.
|
191 |
+
See paper: https://arxiv.org/abs/1701.06538.
|
192 |
+
Args:
|
193 |
+
x: input Tensor with shape [batch_size, input_size]
|
194 |
+
train: a boolean - we only add noise at training time.
|
195 |
+
noise_epsilon: a float
|
196 |
+
Returns:
|
197 |
+
gates: a Tensor with shape [batch_size, num_experts]
|
198 |
+
load: a Tensor with shape [num_experts]
|
199 |
+
"""
|
200 |
+
clean_logits = x @ self.w_gate
|
201 |
+
if self.noisy_gating and train:
|
202 |
+
raw_noise_stddev = x @ self.w_noise
|
203 |
+
noise_stddev = ((self.softplus(raw_noise_stddev) + noise_epsilon))
|
204 |
+
noisy_logits = clean_logits + (torch.randn_like(clean_logits) * noise_stddev)
|
205 |
+
logits = noisy_logits
|
206 |
+
else:
|
207 |
+
logits = clean_logits
|
208 |
+
|
209 |
+
# calculate topk + 1 that will be needed for the noisy gates
|
210 |
+
top_logits, top_indices = logits.topk(min(self.k + 1, self.num_experts), dim=1)
|
211 |
+
top_k_logits = top_logits[:, :self.k]
|
212 |
+
top_k_indices = top_indices[:, :self.k]
|
213 |
+
top_k_gates = self.softmax(top_k_logits)
|
214 |
+
|
215 |
+
zeros = torch.zeros_like(logits, requires_grad=True)
|
216 |
+
gates = zeros.scatter(1, top_k_indices, top_k_gates)
|
217 |
+
|
218 |
+
if self.noisy_gating and self.k < self.num_experts and train:
|
219 |
+
load = (self._prob_in_top_k(clean_logits, noisy_logits, noise_stddev, top_logits)).sum(0)
|
220 |
+
else:
|
221 |
+
load = self._gates_to_load(gates)
|
222 |
+
return gates, load, top_k_indices[0]
|
223 |
+
|
224 |
+
def forward(self, x_ds, D_Kernel, loss_coef=1e-2):
|
225 |
+
gates, load, index_1 = self.noisy_top_k_gating(x_ds, self.training)
|
226 |
+
# calculate importance loss
|
227 |
+
importance = gates.sum(0)
|
228 |
+
|
229 |
+
loss = self.cv_squared(importance) + self.cv_squared(load)
|
230 |
+
loss *= loss_coef
|
231 |
+
|
232 |
+
dispatcher = SparseDispatcher(self.num_experts, gates)
|
233 |
+
expert_kernel = dispatcher.dispatch(D_Kernel, index_1)
|
234 |
+
expert_outputs = [self.experts[i](expert_kernel[i]) for i in range(self.num_experts)]
|
235 |
+
|
236 |
+
return expert_outputs, loss
|
237 |
+
|
238 |
+
|
239 |
+
def default_conv(in_channels, out_channels, kernel_size, bias=True):
|
240 |
+
return nn.Conv2d(in_channels, out_channels, kernel_size, padding=(kernel_size // 2), bias=bias)
|
241 |
+
|
242 |
+
|
243 |
+
class CALayer(nn.Module):
|
244 |
+
def __init__(self, channel, reduction=16):
|
245 |
+
super(CALayer, self).__init__()
|
246 |
+
self.avg_pool = nn.AdaptiveAvgPool2d(1)
|
247 |
+
self.conv_du = nn.Sequential(
|
248 |
+
nn.Conv2d(channel, channel // reduction, 1, padding=0, bias=True),
|
249 |
+
nn.ReLU(inplace=True),
|
250 |
+
nn.Conv2d(channel // reduction, channel, 1, padding=0, bias=True),
|
251 |
+
nn.Sigmoid()
|
252 |
+
)
|
253 |
+
|
254 |
+
def forward(self, x):
|
255 |
+
y = self.avg_pool(x)
|
256 |
+
y = self.conv_du(y)
|
257 |
+
return x * y
|
258 |
+
|
259 |
+
|
260 |
+
class RCAB(nn.Module):
|
261 |
+
def __init__(
|
262 |
+
self, conv, n_feat, kernel_size, reduction,
|
263 |
+
bias=True, bn=False, act=nn.ReLU(True), res_scale=1):
|
264 |
+
|
265 |
+
super(RCAB, self).__init__()
|
266 |
+
modules_body = []
|
267 |
+
for i in range(2):
|
268 |
+
modules_body.append(conv(n_feat, n_feat, kernel_size, bias=bias))
|
269 |
+
if bn: modules_body.append(nn.BatchNorm2d(n_feat))
|
270 |
+
if i == 0: modules_body.append(act)
|
271 |
+
modules_body.append(CALayer(n_feat, reduction))
|
272 |
+
self.body = nn.Sequential(*modules_body)
|
273 |
+
self.res_scale = res_scale
|
274 |
+
|
275 |
+
def forward(self, x):
|
276 |
+
res = self.body(x)
|
277 |
+
res += x
|
278 |
+
return res
|
279 |
+
|
280 |
+
|
281 |
+
class ResidualGroup(nn.Module):
|
282 |
+
def __init__(self, conv, n_feat, kernel_size, reduction, n_resblocks):
|
283 |
+
super(ResidualGroup, self).__init__()
|
284 |
+
modules_body = []
|
285 |
+
modules_body = [
|
286 |
+
RCAB(
|
287 |
+
conv, n_feat, kernel_size, reduction, bias=True, bn=False,
|
288 |
+
act=nn.LeakyReLU(negative_slope=0.2, inplace=True), res_scale=1) \
|
289 |
+
for _ in range(n_resblocks)]
|
290 |
+
modules_body.append(conv(n_feat, n_feat, kernel_size))
|
291 |
+
self.body = nn.Sequential(*modules_body)
|
292 |
+
|
293 |
+
def forward(self, x):
|
294 |
+
res = self.body(x)
|
295 |
+
res += x
|
296 |
+
return res
|
297 |
+
|
298 |
+
|
299 |
+
class ResBlock(nn.Module):
|
300 |
+
def __init__(self, in_feat, out_feat, stride=1):
|
301 |
+
super(ResBlock, self).__init__()
|
302 |
+
self.backbone = nn.Sequential(
|
303 |
+
nn.Conv2d(in_feat, out_feat, kernel_size=3, stride=stride, padding=1, bias=False),
|
304 |
+
nn.BatchNorm2d(out_feat),
|
305 |
+
nn.LeakyReLU(0.1, True),
|
306 |
+
nn.Conv2d(out_feat, out_feat, kernel_size=3, padding=1, bias=False),
|
307 |
+
nn.BatchNorm2d(out_feat),
|
308 |
+
)
|
309 |
+
self.shortcut = nn.Sequential(
|
310 |
+
nn.Conv2d(in_feat, out_feat, kernel_size=1, stride=stride, bias=False),
|
311 |
+
nn.BatchNorm2d(out_feat)
|
312 |
+
)
|
313 |
+
|
314 |
+
def forward(self, x):
|
315 |
+
return nn.LeakyReLU(0.1, True)(self.backbone(x) + self.shortcut(x))
|
316 |
+
|
317 |
+
|
318 |
+
class DaEncoder(nn.Module):
|
319 |
+
def __init__(self, nfeats):
|
320 |
+
super(DaEncoder, self).__init__()
|
321 |
+
|
322 |
+
self.E_pre = nn.Sequential(
|
323 |
+
ResBlock(in_feat=1, out_feat=nfeats // 2, stride=1),
|
324 |
+
ResBlock(in_feat=nfeats // 2, out_feat=nfeats, stride=1),
|
325 |
+
ResBlock(in_feat=nfeats, out_feat=nfeats, stride=1)
|
326 |
+
)
|
327 |
+
self.E = nn.Sequential(
|
328 |
+
nn.Conv2d(nfeats, nfeats * 2, kernel_size=3, stride=2, padding=1),
|
329 |
+
nn.BatchNorm2d(nfeats * 2),
|
330 |
+
nn.LeakyReLU(0.1, True),
|
331 |
+
nn.Conv2d(nfeats * 2, nfeats * 4, kernel_size=3, stride=2, padding=1),
|
332 |
+
nn.BatchNorm2d(nfeats * 4),
|
333 |
+
nn.AdaptiveAvgPool2d(1)
|
334 |
+
)
|
335 |
+
|
336 |
+
def forward(self, x):
|
337 |
+
inter = self.E_pre(x)
|
338 |
+
fea = self.E(inter)
|
339 |
+
|
340 |
+
out = fea.squeeze(-1).squeeze(-1)
|
341 |
+
|
342 |
+
return fea, out, inter
|
343 |
+
|
344 |
+
|
345 |
+
class generateKernel(nn.Module):
|
346 |
+
def __init__(self, nfeats, kernel_size=5):
|
347 |
+
super(generateKernel, self).__init__()
|
348 |
+
|
349 |
+
self.mlp = nn.Sequential(
|
350 |
+
nn.Linear(nfeats * 4, nfeats),
|
351 |
+
nn.LeakyReLU(0.1, True),
|
352 |
+
nn.Linear(nfeats, kernel_size * kernel_size)
|
353 |
+
)
|
354 |
+
|
355 |
+
def forward(self, D_Kernel):
|
356 |
+
D_Kernel = self.mlp(D_Kernel)
|
357 |
+
return D_Kernel
|
358 |
+
|
359 |
+
|
360 |
+
class DAB(nn.Module):
|
361 |
+
def __init__(self):
|
362 |
+
super(DAB, self).__init__()
|
363 |
+
self.relu = nn.LeakyReLU(0.1, True)
|
364 |
+
self.conv = default_conv(1, 1, 1)
|
365 |
+
|
366 |
+
def forward(self, x, D_Kernel):
|
367 |
+
b, c, h, w = x.size()
|
368 |
+
b1, l = D_Kernel.shape
|
369 |
+
kernel_size = int(math.sqrt(l))
|
370 |
+
with torch.no_grad():
|
371 |
+
kernel = D_Kernel.view(-1, 1, kernel_size, kernel_size)
|
372 |
+
out = F.conv2d(x.view(1, -1, h, w), kernel, groups=b * c, padding=(kernel_size - 1) // 2)
|
373 |
+
out = out.view(b, -1, h, w)
|
374 |
+
out = self.conv(self.relu(out).view(b, -1, h, w))
|
375 |
+
return out
|
376 |
+
|
377 |
+
|
378 |
+
class DR(nn.Module):
|
379 |
+
def __init__(self, nfeats, num_experts=4, k=3):
|
380 |
+
super(DR, self).__init__()
|
381 |
+
|
382 |
+
self.topK = k
|
383 |
+
self.num_experts = num_experts
|
384 |
+
self.start_idx = num_experts - k
|
385 |
+
|
386 |
+
self.c1 = ResBlock(in_feat=1, out_feat=nfeats, stride=1)
|
387 |
+
self.gap = nn.AdaptiveMaxPool2d(1)
|
388 |
+
self.gap2 = nn.AdaptiveAvgPool2d(1)
|
389 |
+
self.fc1 = nn.Linear(nfeats, nfeats * 4)
|
390 |
+
|
391 |
+
self.dab = [DAB(), DAB(), DAB()]
|
392 |
+
self.dab_list = nn.ModuleList(self.dab)
|
393 |
+
|
394 |
+
self.DecoderMoE = DecMoE(ds_inputsize=nfeats * 4, input_size=1, output_size=1, num_experts=num_experts,
|
395 |
+
hidden_size=nfeats,
|
396 |
+
noisy_gating=True, k=k, trainingmode=True)
|
397 |
+
|
398 |
+
self.conv = default_conv(1, 1, 1)
|
399 |
+
|
400 |
+
def forward(self, lr, sr, D_Kernel):
|
401 |
+
|
402 |
+
y1 = F.interpolate(lr, scale_factor=0.125, mode='bicubic', align_corners=True,
|
403 |
+
recompute_scale_factor=True)
|
404 |
+
y2 = self.c1(y1)
|
405 |
+
y3 = self.gap(y2) + self.gap2(y2)
|
406 |
+
y4 = y3.view(y3.shape[0], -1)
|
407 |
+
y5 = self.fc1(y4)
|
408 |
+
|
409 |
+
D_Kernel_list, aux_loss = self.DecoderMoE(y5, D_Kernel, loss_coef=0.02)
|
410 |
+
|
411 |
+
sorted_D_Kernel_list = sorted(D_Kernel_list, key=lambda x: (x.size(0), x.size(1)))
|
412 |
+
|
413 |
+
sum_result = None
|
414 |
+
for iidx in range(self.start_idx, self.num_experts):
|
415 |
+
res_d = self.dab_list[iidx - self.start_idx](sr, sorted_D_Kernel_list[iidx])
|
416 |
+
if sum_result is None:
|
417 |
+
sum_result = res_d
|
418 |
+
else:
|
419 |
+
sum_result += res_d
|
420 |
+
|
421 |
+
out = self.conv(sum_result)
|
422 |
+
return out, aux_loss
|
423 |
+
|
424 |
+
|
425 |
+
class DA_rgb(nn.Module):
|
426 |
+
def __init__(self, channels_in, channels_out, kernel_size, reduction):
|
427 |
+
super(DA_rgb, self).__init__()
|
428 |
+
|
429 |
+
self.kernel_size = kernel_size
|
430 |
+
self.channels_out = channels_out
|
431 |
+
self.channels_in = channels_in
|
432 |
+
|
433 |
+
self.dcnrgb = DCN_layer_rgb(self.channels_in, self.channels_out, kernel_size,
|
434 |
+
padding=(kernel_size - 1) // 2, bias=False)
|
435 |
+
|
436 |
+
self.rcab1 = RCAB(default_conv, channels_out, 3, reduction)
|
437 |
+
self.relu = nn.LeakyReLU(0.1, True)
|
438 |
+
self.conv = default_conv(channels_in, channels_out, 3)
|
439 |
+
|
440 |
+
def forward(self, x, inter, fea):
|
441 |
+
out1 = self.rcab1(x)
|
442 |
+
out2 = self.dcnrgb(out1, inter, fea)
|
443 |
+
out = self.conv(out2 + out1)
|
444 |
+
return out
|
445 |
+
|
446 |
+
|
447 |
+
class FusionBlock(nn.Module):
|
448 |
+
def __init__(self, channels_in, channels_out):
|
449 |
+
super(FusionBlock, self).__init__()
|
450 |
+
self.conv1 = default_conv(channels_in, channels_in // 4, 1)
|
451 |
+
self.conv2 = default_conv(channels_in, channels_in // 4, 1)
|
452 |
+
self.conv3 = default_conv(channels_in // 4, channels_in, 1)
|
453 |
+
self.sigmoid = nn.Sigmoid()
|
454 |
+
|
455 |
+
self.conv = default_conv(2 * channels_in, channels_out, 3)
|
456 |
+
|
457 |
+
def forward(self, rgb, dep, inter):
|
458 |
+
inter1 = self.conv1(inter)
|
459 |
+
rgb1 = self.conv2(rgb)
|
460 |
+
|
461 |
+
w = torch.sigmoid(inter1)
|
462 |
+
rgb2 = rgb1 * w
|
463 |
+
rgb3 = self.conv3(rgb2) + rgb
|
464 |
+
cat1 = torch.cat([rgb3, dep], dim=1)
|
465 |
+
out = self.conv(cat1)
|
466 |
+
|
467 |
+
return out
|
468 |
+
|
469 |
+
|
470 |
+
class DOFT(nn.Module):
|
471 |
+
def __init__(self, channels_in, channels_out, kernel_size, reduction):
|
472 |
+
super(DOFT, self).__init__()
|
473 |
+
self.channels_out = channels_out
|
474 |
+
self.channels_in = channels_in
|
475 |
+
self.kernel_size = kernel_size
|
476 |
+
|
477 |
+
self.DA_rgb = DA_rgb(channels_in, channels_out, kernel_size, reduction)
|
478 |
+
self.fb = FusionBlock(channels_in, channels_out)
|
479 |
+
|
480 |
+
self.relu = nn.LeakyReLU(0.1, True)
|
481 |
+
|
482 |
+
def forward(self, x, inter, rgb, fea):
|
483 |
+
rgb = self.DA_rgb(rgb, inter, fea)
|
484 |
+
|
485 |
+
out1 = self.fb(rgb, x, inter)
|
486 |
+
out = x + out1
|
487 |
+
return out
|
488 |
+
|
489 |
+
|
490 |
+
class DSRN(nn.Module):
|
491 |
+
def __init__(self, nfeats=64, reduction=16, conv=default_conv):
|
492 |
+
super(DSRN, self).__init__()
|
493 |
+
|
494 |
+
kernel_size = 3
|
495 |
+
|
496 |
+
n_feats = nfeats
|
497 |
+
|
498 |
+
# head module
|
499 |
+
modules_head = [conv(1, n_feats, kernel_size)]
|
500 |
+
self.head = nn.Sequential(*modules_head)
|
501 |
+
|
502 |
+
modules_head_rgb = [conv(3, n_feats, kernel_size)]
|
503 |
+
self.head_rgb = nn.Sequential(*modules_head_rgb)
|
504 |
+
|
505 |
+
self.dgm1 = DOFT(n_feats, n_feats, 3, reduction)
|
506 |
+
self.dgm2 = DOFT(n_feats, n_feats, 3, reduction)
|
507 |
+
self.dgm3 = DOFT(n_feats, n_feats, 3, reduction)
|
508 |
+
self.dgm4 = DOFT(n_feats, n_feats, 3, reduction)
|
509 |
+
self.dgm5 = DOFT(n_feats, n_feats, 3, reduction)
|
510 |
+
|
511 |
+
self.c_d1 = ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)
|
512 |
+
self.c_d2 = ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)
|
513 |
+
self.c_d3 = ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)
|
514 |
+
self.c_d4 = ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)
|
515 |
+
|
516 |
+
modules_d5 = [conv(5 * n_feats, n_feats, 1),
|
517 |
+
ResidualGroup(conv, n_feats, 3, reduction=reduction, n_resblocks=2)]
|
518 |
+
self.c_d5 = nn.Sequential(*modules_d5)
|
519 |
+
|
520 |
+
self.c_r1 = conv(n_feats, n_feats, kernel_size)
|
521 |
+
self.c_r2 = conv(n_feats, n_feats, kernel_size)
|
522 |
+
self.c_r3 = conv(n_feats, n_feats, kernel_size)
|
523 |
+
self.c_r4 = conv(n_feats, n_feats, kernel_size)
|
524 |
+
|
525 |
+
self.act = nn.LeakyReLU(0.1, True)
|
526 |
+
|
527 |
+
# tail
|
528 |
+
modules_tail = [conv(n_feats, 1, kernel_size)]
|
529 |
+
self.tail = nn.Sequential(*modules_tail)
|
530 |
+
|
531 |
+
def forward(self, x, inter, rgb, fea):
|
532 |
+
# head
|
533 |
+
x = self.head(x)
|
534 |
+
rgb = self.head_rgb(rgb)
|
535 |
+
rgb1 = self.c_r1(rgb)
|
536 |
+
rgb2 = self.c_r2(self.act(rgb1))
|
537 |
+
rgb3 = self.c_r3(self.act(rgb2))
|
538 |
+
rgb4 = self.c_r4(self.act(rgb3))
|
539 |
+
|
540 |
+
dep10 = self.dgm1(x, inter, rgb, fea)
|
541 |
+
dep1 = self.c_d1(dep10)
|
542 |
+
dep20 = self.dgm2(dep1, inter, rgb1, fea)
|
543 |
+
dep2 = self.c_d2(self.act(dep20))
|
544 |
+
dep30 = self.dgm3(dep2, inter, rgb2, fea)
|
545 |
+
dep3 = self.c_d3(self.act(dep30))
|
546 |
+
dep40 = self.dgm4(dep3, inter, rgb3, fea)
|
547 |
+
dep4 = self.c_d4(self.act(dep40))
|
548 |
+
dep50 = self.dgm5(dep4, inter, rgb4, fea)
|
549 |
+
|
550 |
+
cat1 = torch.cat([dep1, dep2, dep3, dep4, dep50], dim=1)
|
551 |
+
dep6 = self.c_d5(cat1)
|
552 |
+
|
553 |
+
res = dep6 + x
|
554 |
+
|
555 |
+
out = self.tail(res)
|
556 |
+
|
557 |
+
return out
|
558 |
+
|
559 |
+
class SRN(nn.Module):
|
560 |
+
def __init__(self, nfeats, reduction):
|
561 |
+
super(SRN, self).__init__()
|
562 |
+
|
563 |
+
# Restorer
|
564 |
+
self.R = DSRN(nfeats=nfeats, reduction=reduction)
|
565 |
+
|
566 |
+
# Encoder
|
567 |
+
self.Enc = DaEncoder(nfeats=nfeats)
|
568 |
+
|
569 |
+
def forward(self, x_query, rgb):
|
570 |
+
|
571 |
+
fea, d_kernel, inter = self.Enc(x_query)
|
572 |
+
restored = self.R(x_query, inter, rgb, fea)
|
573 |
+
|
574 |
+
return restored, d_kernel
|
575 |
+
|
576 |
+
|
577 |
+
class Net_ddp(nn.Module):
|
578 |
+
def __init__(self, tiny_model=False):
|
579 |
+
super(Net_ddp, self).__init__()
|
580 |
+
|
581 |
+
if tiny_model:
|
582 |
+
n_feats = 24
|
583 |
+
reduction = 4
|
584 |
+
else:
|
585 |
+
n_feats = 64
|
586 |
+
reduction = 16
|
587 |
+
|
588 |
+
self.srn = SRN(nfeats=n_feats, reduction=reduction)
|
589 |
+
self.Dab = DR(nfeats=n_feats)
|
590 |
+
|
591 |
+
self.CLLoss = ContrastLoss(ablation=False)
|
592 |
+
|
593 |
+
def forward(self, x_query, rgb):
|
594 |
+
|
595 |
+
restored, d_kernel = self.srn(x_query, rgb)
|
596 |
+
|
597 |
+
d_lr_, aux_loss = self.Dab(x_query,restored, d_kernel)
|
598 |
+
CLLoss1 = self.CLLoss(d_lr_, x_query, restored)
|
599 |
+
|
600 |
+
return restored, d_lr_, aux_loss, CLLoss1
|
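Compared with Net in net/dornet.py, Net_ddp folds the degradation branch and the contrastive loss into a single forward so that DistributedDataParallel only sees one module (see train_tofdsr.py). A sketch of how its four outputs combine into one training loss; the 0.1 weights mirror the single-GPU script and are illustrative here:

```python
import torch
import torch.nn as nn
from net.dornet_ddp import Net_ddp

net = Net_ddp(tiny_model=True).cuda()
l1 = nn.L1Loss().cuda()

lr = torch.rand(2, 1, 64, 64).cuda()    # degraded low-quality depth in [0, 1]
rgb = torch.rand(2, 3, 64, 64).cuda()   # aligned RGB guidance in [0, 1]
gt = torch.rand(2, 1, 64, 64).cuda()    # ground-truth depth in [0, 1]

restored, d_lr_, aux_loss, cl_loss = net(x_query=lr, rgb=rgb)
loss = l1(restored, gt) + 0.1 * l1(d_lr_, lr) + 0.1 * cl_loss + aux_loss
loss.backward()
```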
test_img.py
ADDED
@@ -0,0 +1,17 @@
+import numpy as np
+import os
+import torch
+import cv2
+from net.dornet import Net
+from data.rgbdd_dataloader import *
+from PIL import Image
+import torchvision.transforms as transforms
+
+# RGBDD_Dataset data process
+
+# S1: load data
+
+lr_path = r"C:\Users\wuyuan\Downloads/model_out.png"
+
+lr_pil = Image.open(lr_path)
+import pdb; pdb.set_trace()
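test_img.py stops at a pdb breakpoint right after opening one low-resolution depth map, so it is a debugging stub rather than a working demo. A hedged sketch of how the bundled RGB-D-D example pair could be pushed through the released model end to end; the min-max normalization, the bicubic pre-upsampling to the RGB resolution, and the checkpoint choice are assumptions modeled on the evaluation scripts, not code from this file:

```python
import numpy as np
import torch
import torch.nn.functional as F
from PIL import Image
from net.dornet import Net

depth = np.array(Image.open("test_img/RGB-D-D/20200518160957_LR_fill_depth.png")).astype(np.float32)
rgb = np.array(Image.open("test_img/RGB-D-D/20200518160957_RGB.jpg")).astype(np.float32) / 255.0

d_min, d_max = depth.min(), depth.max()
lr = torch.from_numpy((depth - d_min) / (d_max - d_min))[None, None].cuda()          # [1, 1, h, w]
guide = torch.from_numpy(rgb).permute(2, 0, 1)[None].cuda()                          # [1, 3, H, W]
lr = F.interpolate(lr, size=guide.shape[-2:], mode="bicubic", align_corners=False)   # match RGB size

net = Net(tiny_model=False).cuda().eval()
net.load_state_dict(torch.load("./checkpoints/RGBDD.pth", map_location="cuda:0"))
with torch.no_grad():
    pred = net(x_query=lr, rgb=guide)[0, 0].clamp(0, 1).cpu().numpy()

# De-normalize back to the original depth range and store as a 16-bit PNG.
Image.fromarray((pred * (d_max - d_min) + d_min).astype(np.uint16)).save("model_out.png")
```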
test_img/RGB-D-D/20200518160957_LR_fill_depth.png
ADDED
test_img/RGB-D-D/20200518160957_RGB.jpg
ADDED
test_img/TOFDSR/2020_09_08_13_59_59_435_rgb_depth_crop_fill.png
ADDED
test_img/TOFDSR/2020_09_08_13_59_59_435_rgb_rgb_crop.png
ADDED (stored via Git LFS)
test_nyu_rgbdd.py
ADDED
@@ -0,0 +1,103 @@
+import argparse
+
+from utils import *
+import torchvision.transforms as transforms
+
+from net.dornet import Net
+from torch.utils.data import Dataset, DataLoader
+from data.nyu_dataloader import *
+from data.rgbdd_dataloader import *
+# from data.tofsr_dataloader import *
+
+import os
+
+import torch
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--scale', type=int, default=4, help='scale factor')
+parser.add_argument("--root_dir", type=str, default='./dataset/RGB-D-D', help="root dir of dataset")
+parser.add_argument("--model_dir", type=str, default="./checkpoints/RGBDD.pth", help="path of net")
+parser.add_argument("--results_dir", type=str, default='./results/', help="root dir of results")
+parser.add_argument('--tiny_model', action='store_true', help='tiny model')
+parser.add_argument("--blur_sigma", type=float, default=3.6, help="blur_sigma")
+parser.add_argument('--isNoisy', action='store_true', help='Noisy')
+
+opt = parser.parse_args()
+
+# Build the model variant that matches the checkpoint being loaded (full or tiny).
+net = Net(tiny_model=opt.tiny_model).cuda()
+
+print("*********************************************")
+print(sum(p.numel() for p in net.parameters() if p.requires_grad))
+print("*********************************************")
+net.load_state_dict(torch.load(opt.model_dir, map_location='cuda:0'))
+device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+net.to(device)
+
+data_transform = transforms.Compose([transforms.ToTensor()])
+
+dataset_name = opt.root_dir.split('/')[-1]
+
+if dataset_name == 'RGB-D-D':
+    dataset = RGBDD_Dataset(root_dir=opt.root_dir, scale=opt.scale, downsample='real', train=False,
+                            transform=data_transform, isNoisy=opt.isNoisy, blur_sigma=opt.blur_sigma)
+    rmse = np.zeros(405)
+elif dataset_name == 'NYU-v2':
+    dataset = NYU_v2_datset(root_dir=opt.root_dir, scale=opt.scale, transform=data_transform, train=False)
+    test_minmax = np.load('%s/test_minmax.npy' % opt.root_dir)
+    rmse = np.zeros(449)
+
+dataloader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=8)
+data_num = len(dataloader)
+
+with torch.no_grad():
+    net.eval()
+    if dataset_name == 'RGB-D-D':
+        for idx, data in enumerate(dataloader):
+            guidance, lr, gt, maxx, minn, name = data['guidance'].cuda(), data['lr'].cuda(), data['gt'].cuda(), data[
+                'max'].cuda(), data['min'].cuda(), data['name']
+            out = net(x_query=lr, rgb=guidance)
+            rmse[idx] = rgbdd_calc_rmse(gt[0, 0], out[0, 0], [maxx, minn])
+
+            # Save results (Save the output depth map)
+            # path_output = '{}/output'.format(opt.results_dir)
+            # os.makedirs(path_output, exist_ok=True)
+            # path_save_pred = '{}/{}.png'.format(path_output, name[0])
+
+            # pred = out[0, 0] * (maxx - minn) + minn
+            # pred = pred.cpu().detach().numpy()
+            # pred = pred.astype(np.uint16)
+            # pred = Image.fromarray(pred)
+            # pred.save(path_save_pred)
+
+            print('idx:%d RMSE:%f' % (idx + 1, rmse[idx]))
+        print("==========RGB-D-D=========")
+        print(rmse.mean())
+        print("==========RGB-D-D=========")
+    elif dataset_name == 'NYU-v2':
+        # t = np.zeros(449)
+        for idx, data in enumerate(dataloader):
+            guidance, lr, gt = data['guidance'].cuda(), data['lr'].cuda(), data['gt'].cuda()
+            out = net(x_query=lr, rgb=guidance)
+
+            minmax = test_minmax[:, idx]
+            minmax = torch.from_numpy(minmax).cuda()
+            rmse[idx] = calc_rmse(gt[0, 0], out[0, 0], minmax)
+
+            # Save results (Save the output depth map)
+            # path_output = '{}/output'.format(opt.results_dir)
+            # os.makedirs(path_output, exist_ok=True)
+            # path_save_pred = '{}/{:010d}.png'.format(path_output, idx)
+
+            # pred = out[0,0] * (minmax[0] - minmax[1]) + minmax[1]
+            # pred = pred * 1000.0
+            # pred = pred.cpu().detach().numpy()
+            # pred = pred.astype(np.uint16)
+            # pred = Image.fromarray(pred)
+            # pred.save(path_save_pred)
+
+            print('idx:%d RMSE:%f' % (idx + 1, rmse[idx]))
+        print("=========NYU-v2==========")
+        print(rmse.mean())
+        print("=========NYU-v2==========")
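The evaluation branch is chosen by the last path component of --root_dir ('RGB-D-D' or 'NYU-v2'), so a typical run is `python test_nyu_rgbdd.py --scale 4 --root_dir ./dataset/RGB-D-D --model_dir ./checkpoints/RGBDD.pth`. Both branches report RMSE after mapping the output back to the original depth range with per-image min/max values; the exact metric lives in utils.py, which is not shown here, so the snippet below is only an approximate restatement of the idea (details such as border cropping or unit conversion may differ):

```python
import torch

def rmse_denormalized(gt, pred, minmax):
    """Approximate sketch of the metric used above: undo the [0, 1] normalization
    with the per-image (max, min) pair, then take the RMSE in original depth units."""
    maxx, minn = minmax
    gt_d = gt * (maxx - minn) + minn
    pred_d = pred * (maxx - minn) + minn
    return torch.sqrt(torch.mean((gt_d - pred_d) ** 2)).item()
```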
test_tofdsr.py
ADDED
@@ -0,0 +1,66 @@
+import argparse
+import numpy as np
+from utils import *
+import torchvision.transforms as transforms
+
+from net.dornet_ddp import Net_ddp
+
+from data.tofdc_dataloader import *
+
+import os
+
+import torch
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--scale', type=int, default=4, help='scale factor')
+parser.add_argument("--root_dir", type=str, default='/opt/data/private/dataset', help="root dir of dataset")
+parser.add_argument("--model_dir", type=str, default="./checkpoints/TOFDSR.pth", help="path of net")
+parser.add_argument("--results_dir", type=str, default='./results/', help="root dir of results")
+parser.add_argument('--tiny_model', action='store_true', help='tiny model')
+parser.add_argument("--blur_sigma", type=float, default=3.6, help="blur_sigma")
+parser.add_argument('--isNoisy', action='store_true', help='Noisy')
+
+opt = parser.parse_args()
+
+# The TOFDSR checkpoints hold the SRN sub-network of Net_ddp, so only that part is built here.
+net = Net_ddp(tiny_model=opt.tiny_model).srn.cuda()
+
+net.load_state_dict(torch.load(opt.model_dir, map_location='cuda:0'))
+device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+net.to(device)
+
+data_transform = transforms.Compose([transforms.ToTensor()])
+
+dataset_name = opt.root_dir.split('/')[-1]
+
+dataset = TOFDSR_Dataset(root_dir=opt.root_dir, train=False, txt_file="./data/TOFDSR_Test.txt",
+                         transform=data_transform, isNoisy=opt.isNoisy, blur_sigma=opt.blur_sigma)
+dataloader = DataLoader(dataset, batch_size=1, shuffle=False, num_workers=8)
+
+data_num = len(dataloader)
+rmse = np.zeros(data_num)
+
+with torch.no_grad():
+    net.eval()
+
+    for idx, data in enumerate(dataloader):
+        guidance, lr, gt, maxx, minn, name = data['guidance'].cuda(), data['lr'].cuda(), data['gt'].cuda(), data[
+            'max'].cuda(), data['min'].cuda(), data['name']
+        out, _ = net(x_query=lr, rgb=guidance)
+        rmse[idx] = tofdsr_calc_rmse(gt[0, 0], out[0, 0], [maxx, minn])
+
+        # Save results (Save the output depth map)
+        # path_output = '{}/output'.format(opt.results_dir)
+        # os.makedirs(path_output, exist_ok=True)
+        # path_save_pred = '{}/{}.png'.format(path_output, name[0])
+
+        # pred = out[0, 0] * (maxx - minn) + minn
+        # pred = pred.cpu().detach().numpy()
+        # pred = pred.astype(np.uint16)
+        # pred = Image.fromarray(pred)
+        # pred.save(path_save_pred)
+
+        print('idx:%d RMSE:%f' % (idx + 1, rmse[idx]))
+    print("=========TOFDSR==========")
+    print(rmse.mean())
+    print("=========TOFDSR==========")
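A typical run is `python test_tofdsr.py --model_dir ./checkpoints/TOFDSR.pth --root_dir <dataset root>` (add `--isNoisy` when evaluating the *_Noisy checkpoints). If predictions need to be written out, the commented-out save block above can be lifted into a small helper; this is that block repackaged, so the uint16 range assumption carries over:

```python
import numpy as np
from PIL import Image

def save_depth_png(out, maxx, minn, path):
    # Same steps as the commented-out block above: de-normalize with the
    # per-image min/max and write a 16-bit PNG.
    pred = out[0, 0] * (maxx - minn) + minn
    pred = pred.cpu().detach().numpy().astype(np.uint16)
    Image.fromarray(pred).save(path)
```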
train_nyu_rgbdd.py
ADDED
@@ -0,0 +1,158 @@
1 |
+
import argparse
|
2 |
+
from net.dornet import Net
|
3 |
+
from net.CR import *
|
4 |
+
|
5 |
+
from data.rgbdd_dataloader import *
|
6 |
+
from data.nyu_dataloader import *
|
7 |
+
|
8 |
+
from utils import calc_rmse, rgbdd_calc_rmse
|
9 |
+
|
10 |
+
from torch.utils.data import Dataset
|
11 |
+
from torchvision import transforms, utils
|
12 |
+
import torch
|
13 |
+
import torch.optim as optim
|
14 |
+
import torch.nn as nn
|
15 |
+
|
16 |
+
from tqdm import tqdm
|
17 |
+
import logging
|
18 |
+
from datetime import datetime
|
19 |
+
import os
|
20 |
+
|
21 |
+
import numpy as np
|
22 |
+
|
23 |
+
parser = argparse.ArgumentParser()
|
24 |
+
parser.add_argument('--scale', type=int, default=4, help='scale factor')
|
25 |
+
parser.add_argument('--lr', default='0.0001', type=float, help='learning rate')
|
26 |
+
parser.add_argument('--result', default='experiment', help='learning rate')
|
27 |
+
parser.add_argument('--tiny_model', action='store_true', help='tiny model')
|
28 |
+
parser.add_argument('--epoch', default=300, type=int, help='max epoch')
|
29 |
+
parser.add_argument("--decay_iterations", type=list, default=[1.2e5, 2e5, 3.6e5],
|
30 |
+
help="steps to start lr decay")
|
31 |
+
parser.add_argument("--gamma", type=float, default=0.2, help="decay rate of learning rate")
|
32 |
+
parser.add_argument("--root_dir", type=str, default='./dataset/RGB-D-D', help="root dir of dataset")
|
33 |
+
parser.add_argument("--batch_size", type=int, default=3, help="batch_size of training dataloader")
|
34 |
+
parser.add_argument("--blur_sigma", type=int, default=3.6, help="blur_sigma")
|
35 |
+
parser.add_argument('--isNoisy', action='store_true', help='Noisy')
|
36 |
+
|
37 |
+
opt = parser.parse_args()
|
38 |
+
print(opt)
|
39 |
+
|
40 |
+
s = datetime.now().strftime('%Y%m%d%H%M%S')
|
41 |
+
dataset_name = opt.root_dir.split('/')[-1]
|
42 |
+
result_root = '%s/%s-lr_%s-s_%s-%s-b_%s' % (opt.result, s, opt.lr, opt.scale, dataset_name, opt.batch_size)
|
43 |
+
if not os.path.exists(result_root):
|
44 |
+
os.mkdir(result_root)
|
45 |
+
|
46 |
+
logging.basicConfig(filename='%s/train.log' % result_root, format='%(asctime)s %(message)s', level=logging.INFO)
|
47 |
+
logging.info(opt)
|
48 |
+
|
49 |
+
net = Net(tiny_model=opt.tiny_model).cuda()
|
50 |
+
|
51 |
+
print("**********************Parameters***********************")
|
52 |
+
print(sum(p.numel() for p in net.parameters() if p.requires_grad))
|
53 |
+
print("**********************Parameters***********************")
|
54 |
+
net.train()
|
55 |
+
|
56 |
+
optimizer = optim.Adam(net.parameters(), lr=opt.lr)
|
57 |
+
scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=opt.decay_iterations, gamma=opt.gamma)
|
58 |
+
|
59 |
+
CL = ContrastLoss(ablation=False)
|
60 |
+
l1 = nn.L1Loss().cuda()
|
61 |
+
|
62 |
+
data_transform = transforms.Compose([transforms.ToTensor()])
|
63 |
+
|
64 |
+
|
65 |
+
if dataset_name == 'RGB-D-D':
|
66 |
+
train_dataset = RGBDD_Dataset(root_dir=opt.root_dir, scale=opt.scale, downsample='real', train=True,
|
67 |
+
transform=data_transform, isNoisy=opt.isNoisy, blur_sigma=opt.blur_sigma)
|
68 |
+
test_dataset = RGBDD_Dataset(root_dir=opt.root_dir, scale=opt.scale, downsample='real', train=False,
|
69 |
+
transform=data_transform, isNoisy=opt.isNoisy, blur_sigma=opt.blur_sigma)
|
70 |
+
|
71 |
+
elif dataset_name == 'NYU-v2':
|
72 |
+
test_minmax = np.load('%s/test_minmax.npy' % opt.root_dir)
|
73 |
+
train_dataset = NYU_v2_datset(root_dir=opt.root_dir, scale=opt.scale, transform=data_transform, train=True)
|
74 |
+
test_dataset = NYU_v2_datset(root_dir=opt.root_dir, scale=opt.scale, transform=data_transform, train=False)
|
75 |
+
|
76 |
+
|
77 |
+
train_dataloader = torch.utils.data.DataLoader(train_dataset, batch_size=opt.batch_size, shuffle=True, num_workers=8)
|
78 |
+
test_dataloader = torch.utils.data.DataLoader(test_dataset, batch_size=1, shuffle=False, num_workers=8)
|
79 |
+
|
80 |
+
max_epoch = opt.epoch
|
81 |
+
num_train = len(train_dataloader)
|
82 |
+
best_rmse = 100.0
|
83 |
+
best_epoch = 0
|
84 |
+
for epoch in range(max_epoch):
|
85 |
+
# ---------
|
86 |
+
# Training
|
87 |
+
# ---------
|
88 |
+
net.train()
|
89 |
+
running_loss = 0.0
|
90 |
+
|
91 |
+
t = tqdm(iter(train_dataloader), leave=True, total=len(train_dataloader))
|
92 |
+
|
93 |
+
for idx, data in enumerate(t):
|
94 |
+
batches_done = num_train * epoch + idx
|
95 |
+
optimizer.zero_grad()
|
96 |
+
guidance, lr, gt = data['guidance'].cuda(), data['lr'].cuda(), data['gt'].cuda()
|
97 |
+
|
98 |
+
restored, d_lr_, aux_loss = net(x_query=lr, rgb=guidance)
|
99 |
+
|
100 |
+
rec_loss = l1(restored, gt)
|
101 |
+
da_loss = l1(d_lr_, lr)
|
102 |
+
cl_loss = CL(d_lr_,lr,restored)
|
103 |
+
loss = rec_loss + 0.1 * da_loss + 0.1 * cl_loss + aux_loss
|
104 |
+
|
105 |
+
loss.backward()
|
106 |
+
optimizer.step()
|
107 |
+
scheduler.step()
|
108 |
+
running_loss += loss.data.item()
|
109 |
+
|
110 |
+
t.set_description(
|
111 |
+
'[train epoch:%d] loss: Rec_loss:%.8f DA_loss:%.8f CL_loss:%.8f' % (epoch + 1, rec_loss.item(), da_loss.item(), cl_loss.item()))
|
112 |
+
t.refresh()
|
113 |
+
|
114 |
+
logging.info('epoch:%d iteration:%d running_loss:%.10f' % (epoch + 1, batches_done + 1, running_loss / num_train))
|
115 |
+
|
116 |
+
|
117 |
+
# -----------
|
118 |
+
# Validating
|
119 |
+
# -----------
|
120 |
+
with torch.no_grad():
|
121 |
+
|
122 |
+
net.eval()
|
123 |
+
if dataset_name == 'RGB-D-D':
|
124 |
+
rmse = np.zeros(405)
|
125 |
+
elif dataset_name == 'NYU-v2':
|
126 |
+
rmse = np.zeros(449)
|
127 |
+
t = tqdm(iter(test_dataloader), leave=True, total=len(test_dataloader))
|
128 |
+
|
129 |
+
for idx, data in enumerate(t):
|
130 |
+
if dataset_name == 'RGB-D-D':
|
131 |
+
guidance, lr, gt, max, min = data['guidance'].cuda(), data['lr'].cuda(), data['gt'].cuda(), data[
|
132 |
+
'max'].cuda(), data['min'].cuda()
|
133 |
+
out = net(x_query=lr, rgb=guidance)
|
134 |
+
minmax = [max, min]
|
135 |
+
rmse[idx] = rgbdd_calc_rmse(gt[0, 0], out[0, 0], minmax)
|
136 |
+
t.set_description('[validate] rmse: %f' % rmse[:idx + 1].mean())
|
137 |
+
t.refresh()
|
138 |
+
elif dataset_name == 'NYU-v2':
|
139 |
+
guidance, lr, gt = data['guidance'].cuda(), data['lr'].cuda(), data['gt'].cuda()
|
140 |
+
out = net(x_query=lr, rgb=guidance)
|
141 |
+
minmax = test_minmax[:, idx]
|
142 |
+
minmax = torch.from_numpy(minmax).cuda()
|
143 |
+
rmse[idx] = calc_rmse(gt[0, 0], out[0, 0], minmax)
|
144 |
+
t.set_description('[validate] rmse: %f' % rmse[:idx + 1].mean())
|
145 |
+
t.refresh()
|
146 |
+
r_mean = rmse.mean()
|
147 |
+
if r_mean < best_rmse:
|
148 |
+
best_rmse = r_mean
|
149 |
+
best_epoch = epoch
|
150 |
+
torch.save(net.state_dict(),
|
151 |
+
os.path.join(result_root, "RMSE%f_8%d.pth" % (best_rmse, best_epoch + 1)))
|
152 |
+
logging.info(
|
153 |
+
'---------------------------------------------------------------------------------------------------------------------------')
|
154 |
+
logging.info('epoch:%d lr:%f-------mean_rmse:%f (BEST: %f @epoch%d)' % (
|
155 |
+
epoch + 1, scheduler.get_last_lr()[0], r_mean, best_rmse, best_epoch + 1))
|
156 |
+
logging.info(
|
157 |
+
'---------------------------------------------------------------------------------------------------------------------------')
|
158 |
+
|
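Note that scheduler.step() is called once per batch, so the decay_iterations milestones are iteration counts rather than epochs. A standalone check of the resulting schedule (the dummy parameter exists only to build an optimizer):

```python
import torch
import torch.optim as optim

param = torch.nn.Parameter(torch.zeros(1))
optimizer = optim.Adam([param], lr=1e-4)
scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=[1.2e5, 2e5, 3.6e5], gamma=0.2)

for it in range(210000):        # simulate 210k training iterations
    optimizer.step()
    scheduler.step()

print(scheduler.get_last_lr())  # ~4e-06: the lr decayed at iterations 120k and 200k
```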
train_tofdsr.py
ADDED
@@ -0,0 +1,167 @@
1 |
+
import argparse
|
2 |
+
|
3 |
+
from net.dornet_ddp import Net_ddp as Net
|
4 |
+
|
5 |
+
from data.tofdc_dataloader import *
|
6 |
+
from utils import tofdsr_calc_rmse
|
7 |
+
|
8 |
+
from torch.utils.data import Dataset, DataLoader
|
9 |
+
import torch.distributed as dist
|
10 |
+
from torch.nn.parallel import DistributedDataParallel
|
11 |
+
from torch.utils.data.distributed import DistributedSampler
|
12 |
+
from torchvision import transforms, utils
|
13 |
+
import torch.optim as optim
|
14 |
+
|
15 |
+
import random
|
16 |
+
|
17 |
+
from net.CR import *
|
18 |
+
from tqdm import tqdm
|
19 |
+
import logging
|
20 |
+
from datetime import datetime
|
21 |
+
import os
|
22 |
+
|
23 |
+
parser = argparse.ArgumentParser()
|
24 |
+
|
25 |
+
parser.add_argument("--local-rank", default=-1, type=int)
|
26 |
+
|
27 |
+
parser.add_argument('--scale', type=int, default=4, help='scale factor')
|
28 |
+
parser.add_argument('--lr', default='0.0002', type=float, help='learning rate') # 0.0001
|
29 |
+
parser.add_argument('--tiny_model', action='store_true', help='tiny model')
|
30 |
+
parser.add_argument('--epoch', default=300, type=int, help='max epoch')
|
31 |
+
parser.add_argument('--device', default="0,1", type=str, help='which gpu use')
|
32 |
+
parser.add_argument("--decay_iterations", type=list, default=[1.2e5, 2e5, 3.6e5],
|
33 |
+
help="steps to start lr decay")
|
34 |
+
parser.add_argument("--gamma", type=float, default=0.2, help="decay rate of learning rate")
|
35 |
+
parser.add_argument("--root_dir", type=str, default='./dataset/TOFDSR', help="root dir of dataset")
|
36 |
+
parser.add_argument("--batchsize", type=int, default=3, help="batchsize of training dataloader")
|
37 |
+
parser.add_argument("--num_gpus", type=int, default=2, help="num_gpus")
|
38 |
+
parser.add_argument('--seed', type=int, default=7240, help='random seed point')
|
39 |
+
parser.add_argument("--result_root", type=str, default='experiment/TOFDSR', help="root dir of dataset")
|
40 |
+
parser.add_argument("--blur_sigma", type=int, default=3.6, help="blur_sigma")
|
41 |
+
parser.add_argument('--isNoisy', action='store_true', help='Noisy')
|
42 |
+
|
43 |
+
opt = parser.parse_args()
|
44 |
+
print(opt)
|
45 |
+
|
46 |
+
os.environ["CUDA_VISIBLE_DEVICES"] = "0,1"
|
47 |
+
|
48 |
+
torch.manual_seed(opt.seed)
|
49 |
+
np.random.seed(opt.seed)
|
50 |
+
random.seed(opt.seed)
|
51 |
+
torch.cuda.manual_seed_all(opt.seed)
|
52 |
+
|
53 |
+
local_rank = int(os.environ["LOCAL_RANK"])
|
54 |
+
torch.cuda.set_device(local_rank)
|
55 |
+
dist.init_process_group(backend='nccl')
|
56 |
+
device = torch.device("cuda", local_rank)
|
57 |
+
|
58 |
+
s = datetime.now().strftime('%Y%m%d%H%M%S')
|
59 |
+
dataset_name = opt.root_dir.split('/')[-1]
|
60 |
+
|
61 |
+
rank = dist.get_rank()
|
62 |
+
|
63 |
+
logging.basicConfig(filename='%s/train.log' % opt.result_root, format='%(asctime)s %(message)s', level=logging.INFO)
|
64 |
+
logging.info(opt)
|
65 |
+
|
66 |
+
net = Net(tiny_model=opt.tiny_model).cuda()
|
67 |
+
|
68 |
+
data_transform = transforms.Compose([transforms.ToTensor()])
|
69 |
+
|
70 |
+
train_dataset = TOFDSR_Dataset(root_dir=opt.root_dir, train=True, txt_file="./data/TOFDSR_Train.txt", transform=data_transform,
|
71 |
+
isNoisy=opt.isNoisy, blur_sigma=opt.blur_sigma)
|
72 |
+
test_dataset = TOFDSR_Dataset(root_dir=opt.root_dir, train=False, txt_file="./data/TOFDSR_Test.txt", transform=data_transform,
|
73 |
+
isNoisy=opt.isNoisy, blur_sigma=opt.blur_sigma)
|
74 |
+
|
75 |
+
if torch.cuda.device_count() > 1:
|
76 |
+
train_sampler = DistributedSampler(dataset=train_dataset)
|
77 |
+
train_dataloader = DataLoader(train_dataset, batch_size=opt.batchsize, shuffle=False, pin_memory=True, num_workers=8,
|
78 |
+
drop_last=True, sampler=train_sampler)
|
79 |
+
test_dataloader = DataLoader(test_dataset, batch_size=1, shuffle=False, pin_memory=True, num_workers=8)
|
80 |
+
|
81 |
+
net = DistributedDataParallel(net, device_ids=[local_rank], output_device=int(local_rank), find_unused_parameters=True)
|
82 |
+
|
83 |
+
l1 = nn.L1Loss().to(device)
|
84 |
+
|
85 |
+
optimizer = optim.Adam(net.module.parameters(), lr=opt.lr)
|
86 |
+
scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=opt.decay_iterations, gamma=opt.gamma)
|
87 |
+
net.train()
|
88 |
+
|
89 |
+
max_epoch = opt.epoch
|
90 |
+
num_train = len(train_dataloader)
|
91 |
+
best_rmse = 100.0
|
92 |
+
best_epoch = 0
|
93 |
+
for epoch in range(max_epoch):
|
94 |
+
# ---------
|
95 |
+
# Training
|
96 |
+
# ---------
|
97 |
+
train_sampler.set_epoch(epoch)
|
98 |
+
net.train()
|
99 |
+
running_loss = 0.0
|
100 |
+
|
101 |
+
t = tqdm(iter(train_dataloader), leave=True, total=len(train_dataloader))
|
102 |
+
|
103 |
+
for idx, data in enumerate(t):
|
104 |
+
batches_done = num_train * epoch + idx
|
105 |
+
optimizer.zero_grad()
|
106 |
+
guidance, lr, gt = data['guidance'].to(device), data['lr'].to(device), data['gt'].to(device)
|
107 |
+
|
108 |
+
restored, d_lr_, aux_loss, cl_loss = net(x_query=lr, rgb=guidance)
|
109 |
+
|
110 |
+
mask = (gt >= 0.02) & (gt <= 1)
|
111 |
+
gt = gt[mask]
|
112 |
+
restored = restored[mask]
|
113 |
+
lr = lr[mask]
|
114 |
+
d_lr_ = d_lr_[mask]
|
115 |
+
|
116 |
+
rec_loss = l1(restored, gt)
|
117 |
+
da_loss = l1(d_lr_, lr)
|
118 |
+
|
119 |
+
loss = rec_loss + 0.1 * da_loss + 0.1 * cl_loss + aux_loss
|
120 |
+
loss.backward()
|
121 |
+
optimizer.step()
|
122 |
+
scheduler.step()
|
123 |
+
running_loss += loss.data.item()
|
124 |
+
running_loss_50 = running_loss
|
125 |
+
|
126 |
+
if idx % 50 == 0:
|
127 |
+
running_loss_50 /= 50
|
128 |
+
t.set_description(
|
129 |
+
'[train epoch:%d] loss: Rec_loss:%.8f DA_loss:%.8f CL_loss:%.8f' % (
|
130 |
+
epoch + 1, rec_loss.item(), da_loss.item(), cl_loss.item()))
|
131 |
+
t.refresh()
|
132 |
+
|
133 |
+
logging.info('epoch:%d iteration:%d running_loss:%.10f' % (epoch + 1, batches_done + 1, running_loss / num_train))
|
134 |
+
|
135 |
+
# -----------
|
136 |
+
# Validating
|
137 |
+
# -----------
|
138 |
+
if rank == 0:
|
139 |
+
with torch.no_grad():
|
140 |
+
|
141 |
+
net.eval()
|
142 |
+
rmse = np.zeros(560)
|
143 |
+
t = tqdm(iter(test_dataloader), leave=True, total=len(test_dataloader))
|
144 |
+
|
145 |
+
for idx, data in enumerate(t):
|
146 |
+
guidance, lr, gt, maxx, minn = data['guidance'].to(device), data['lr'].to(device), data['gt'].to(
|
147 |
+
device), data[
|
148 |
+
'max'].to(device), data['min'].to(device)
|
149 |
+
out, _ = net.module.srn(x_query=lr, rgb=guidance)
|
150 |
+
minmax = [maxx, minn]
|
151 |
+
rmse[idx] = tofdsr_calc_rmse(gt[0, 0], out[0, 0], minmax)
|
152 |
+
t.set_description('[validate] rmse: %f' % rmse[:idx + 1].mean())
|
153 |
+
t.refresh()
|
154 |
+
|
155 |
+
r_mean = rmse.mean()
|
156 |
+
if r_mean < best_rmse:
|
157 |
+
best_rmse = r_mean
|
158 |
+
best_epoch = epoch
|
159 |
+
torch.save(net.module.srn.state_dict(),
|
160 |
+
os.path.join(opt.result_root, "RMSE%f_8%d.pth" % (best_rmse, best_epoch + 1)))
|
161 |
+
logging.info(
|
162 |
+
'---------------------------------------------------------------------------------------------------------------------------')
|
163 |
+
logging.info('epoch:%d lr:%f-------mean_rmse:%f (BEST: %f @epoch%d)' % (
|
164 |
+
epoch + 1, scheduler.get_last_lr()[0], r_mean, best_rmse, best_epoch + 1))
|
165 |
+
logging.info(
|
166 |
+
'---------------------------------------------------------------------------------------------------------------------------')
|
167 |
+
|
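Since train_tofdsr.py reads `LOCAL_RANK` from the environment and initialises an `nccl` process group, it is intended to be started by a distributed launcher; a plausible two-GPU invocation matching the defaults above would be `torchrun --nproc_per_node=2 train_tofdsr.py`, adding `--tiny_model` or `--isNoisy` as needed.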
utils.py
ADDED
@@ -0,0 +1,37 @@
import torch


def calc_rmse(a, b, minmax):
    # NYU-v2: crop a 6-pixel border, de-normalise both maps back to metres,
    # then scale by 100 so the RMSE is reported in centimetres.
    a = a[6:-6, 6:-6]
    b = b[6:-6, 6:-6]

    a = a * (minmax[0] - minmax[1]) + minmax[1]
    b = b * (minmax[0] - minmax[1]) + minmax[1]
    a = a * 100
    b = b * 100

    return torch.sqrt(torch.mean(torch.pow(a - b, 2)))


def rgbdd_calc_rmse(gt, out, minmax):
    # RGB-D-D: gt is already in millimetres, only the prediction is de-normalised;
    # dividing by 10 gives an RMSE in centimetres.
    gt = gt[6:-6, 6:-6]
    out = out[6:-6, 6:-6]

    out = out * (minmax[0] - minmax[1]) + minmax[1]
    gt = gt / 10.0
    out = out / 10.0

    return torch.sqrt(torch.mean(torch.pow(gt - out, 2)))


def tofdsr_calc_rmse(gt, out, minmax):
    # TOFDSR: same convention as RGB-D-D, but only depths in the valid
    # 100-5000 mm range contribute to the RMSE (in centimetres).
    gt = gt[6:-6, 6:-6]
    out = out[6:-6, 6:-6]

    mask = (gt >= 100) & (gt <= 5000)
    gt = gt[mask]
    out = out[mask]

    out = out * (minmax[0] - minmax[1]) + minmax[1]
    gt = gt / 10.0
    out = out / 10.0

    return torch.sqrt(torch.mean(torch.pow(gt - out, 2)))
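In `tofdsr_calc_rmse` the ground truth is expected in millimetres, the prediction in the network's normalised [0, 1] range, and the returned RMSE is in centimetres over the border-cropped, validity-masked region. A minimal self-contained sketch of how it is expected to be called, with synthetic tensors standing in for a real sample (shapes and value ranges are assumptions inferred from the training script above):

import torch
from utils import tofdsr_calc_rmse

gt = torch.rand(192, 256) * 4900 + 100            # ground-truth depth in millimetres (valid range 100-5000)
minmax = [gt.max(), gt.min()]                     # per-sample max/min used to de-normalise the prediction
out = (gt - minmax[1]) / (minmax[0] - minmax[1])  # network output lives in [0, 1]
out = out + 0.001 * torch.randn_like(out)         # pretend prediction error

print(tofdsr_calc_rmse(gt, out, minmax).item())   # border-cropped, masked RMSE in centimetres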