Changes
On December 3, 2024 at 12:01:34 AM UTC, admin:
- Added resource Original Metadata to NYU-Depth V2
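For context, a resource like this is normally attached to a CKAN package through the Action API's resource_create call. The sketch below is purely illustrative and is not a record of how this particular change was performed; the API token, local file name, and endpoint path are assumptions built around the dataset's "domain" field shown in the record.

    # Illustrative only: attaching a JSON metadata file to an existing CKAN
    # package via the standard Action API. Token and file name are hypothetical.
    import requests

    CKAN_URL = "https://service.tib.eu/ldmservice"  # "domain" field of the dataset
    API_TOKEN = "<api-token>"                       # hypothetical credential

    with open("metadata.json", "rb") as fh:
        resp = requests.post(
            f"{CKAN_URL}/api/3/action/resource_create",
            headers={"Authorization": API_TOKEN},
            data={
                "package_id": "nyu-depth-v2",
                "name": "Original Metadata",
                "format": "JSON",
                "mimetype": "application/json",
                "description": "The json representation of the dataset "
                               "with its distributions based on DCAT.",
            },
            files={"upload": ("metadata.json", fh)},  # uploaded files get url_type "upload"
        )
    resp.raise_for_status()
    print(resp.json()["result"]["id"])  # id of the newly created resource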
The package metadata changed as follows ("-" marks the previous value, "+" the new value):

 {
   "access_rights": "",
   "author": "Seungyeop Lee",
   "author_email": "",
   "citation": [
     "https://doi.org/10.48550/arXiv.2309.01624"
   ],
   "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700",
   "defined_in": "https://doi.org/10.48550/arXiv.2203.10856",
   "doi": "10.57702/wjfr6tqu",
   "doi_date_published": "2024-12-02",
   "doi_publisher": "TIB",
   "doi_status": true,
   "domain": "https://service.tib.eu/ldmservice",
   "extra_authors": [
     {
       "extra_author": "Knut Peterson",
       "orcid": ""
     },
     {
       "extra_author": "Solmaz Arezoomandan",
       "orcid": ""
     },
     {
       "extra_author": "Bill Cai",
       "orcid": ""
     },
     {
       "extra_author": "Peihan Li",
       "orcid": ""
     },
     {
       "extra_author": "Lifeng Zhou",
       "orcid": ""
     },
     {
       "extra_author": "David Han",
       "orcid": ""
     }
   ],
   "groups": [
     {
       "description": "",
       "display_name": "Benchmark",
       "id": "e3c09ae3-950f-4070-b72e-01b7846cae0f",
       "image_display_url": "",
       "name": "benchmark",
       "title": "Benchmark"
     },
     {
       "description": "",
       "display_name": "Computer Vision",
       "id": "d09caf7c-26c7-4e4d-bb8e-49476a90ba25",
       "image_display_url": "",
       "name": "computer-vision",
       "title": "Computer Vision"
     },
     {
       "description": "",
       "display_name": "Depth Estimation",
       "id": "1cfc3f7a-9b2f-4ee9-9d15-9883618b3218",
       "image_display_url": "",
       "name": "depth-estimation",
       "title": "Depth Estimation"
     },
     {
       "description": "",
       "display_name": "Image Segmentation",
       "id": "7c8cc5f1-a9b2-4924-82ec-9e3aa3049a04",
       "image_display_url": "",
       "name": "image-segmentation",
       "title": "Image Segmentation"
     },
     {
       "description": "",
       "display_name": "Indoor Segmentation",
       "id": "83a1234f-7903-418d-8bed-9e6f085caaa0",
       "image_display_url": "",
       "name": "indoor-segmentation",
       "title": "Indoor Segmentation"
     },
     {
       "description": "",
       "display_name": "Monocular Depth Estimation",
       "id": "76f2b0b6-1665-4e60-9237-e546a2971c7c",
       "image_display_url": "",
       "name": "monocular-depth-estimation",
       "title": "Monocular Depth Estimation"
     }
   ],
   "id": "7130a369-6155-4226-8c39-a69df85ac729",
   "isopen": false,
   "landing_page": "https://www.cs.nyu.edu/~ylclab/research/datasets/nyu_depth_v2.html",
   "license_title": null,
   "link_orkg": "",
   "metadata_created": "2024-12-02T23:10:51.098647",
-  "metadata_modified": "2024-12-03T00:01:32.381477",
+  "metadata_modified": "2024-12-03T00:01:33.453572",
   "name": "nyu-depth-v2",
   "notes": "The NYU-Depth V2 dataset contains pairs of RGB and depth images collected from Microsoft Kinect in 464 indoor scenes.",
-  "num_resources": 0,
+  "num_resources": 1,
   "num_tags": 9,
   "organization": {
     "approval_status": "approved",
     "created": "2024-11-25T12:11:38.292601",
     "description": "",
     "id": "079d46db-32df-4b48-91f3-0a8bc8f69559",
     "image_url": "",
     "is_organization": true,
     "name": "no-organization",
     "state": "active",
     "title": "No Organization",
     "type": "organization"
   },
   "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559",
   "private": false,
   "relationships_as_object": [],
   "relationships_as_subject": [],
-  "resources": [],
+  "resources": [
+    {
+      "cache_last_updated": null,
+      "cache_url": null,
+      "created": "2024-12-03T00:20:35",
+      "data": [
+        "dcterms:title",
+        "dcterms:accessRights",
+        "dcterms:creator",
+        "dcterms:description",
+        "dcterms:issued",
+        "dcterms:language",
+        "dcterms:identifier",
+        "dcat:theme",
+        "dcterms:type",
+        "dcat:keyword",
+        "dcat:landingPage",
+        "dcterms:hasVersion",
+        "dcterms:format",
+        "mls:task",
+        "datacite:isDescribedBy"
+      ],
+      "description": "The json representation of the dataset with its distributions based on DCAT.",
+      "format": "JSON",
+      "hash": "",
+      "id": "3f280e3b-2f9e-4434-9c01-2e92a5e846ed",
+      "last_modified": "2024-12-03T00:01:33.444964",
+      "metadata_modified": "2024-12-03T00:01:33.456437",
+      "mimetype": "application/json",
+      "mimetype_inner": null,
+      "name": "Original Metadata",
+      "package_id": "7130a369-6155-4226-8c39-a69df85ac729",
+      "position": 0,
+      "resource_type": null,
+      "size": 1183,
+      "state": "active",
+      "url": "resource/3f280e3b-2f9e-4434-9c01-2e92a5e846ed/download/metadata.json",
+      "url_type": "upload"
+    }
+  ],
   "services_used_list": "",
   "state": "active",
   "tags": [
     {
       "display_name": "Benchmark",
       "id": "70474eb4-f8bf-42f1-bf26-7511d4f3356c",
       "name": "Benchmark",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "Indoor Segmentation",
       "id": "873c9aba-d65e-4468-80f5-aabfe48d2c7d",
       "name": "Indoor Segmentation",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "Monocular Depth Estimation",
       "id": "00d9c951-f01d-4cfd-aa97-eb1efc4edbe6",
       "name": "Monocular Depth Estimation",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "NYU-Depth V2",
       "id": "5aaf1752-057a-40a0-b055-91bd2716db73",
       "name": "NYU-Depth V2",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "RGB-D",
       "id": "b4388a5d-d31a-45b3-bd20-55ea8c48a7be",
       "name": "RGB-D",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "Support Inference",
       "id": "6438f64f-06af-4ce7-9ff5-6a744b57c8ea",
       "name": "Support Inference",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "depth estimation",
       "id": "3c08a798-cec3-4682-a668-4f95d6d8ad18",
       "name": "depth estimation",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "image segmentation",
       "id": "7eaed78e-c73a-4929-a8c9-60265069f59a",
       "name": "image segmentation",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "indoor scenes",
       "id": "7c39b0f6-b707-459a-9d7e-7e2625177b9f",
       "name": "indoor scenes",
       "state": "active",
       "vocabulary_id": null
     }
   ],
   "title": "NYU-Depth V2",
   "type": "dataset",
   "version": ""
 }
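The updated record can be fetched back with package_show to confirm the effect of the change (num_resources going from 0 to 1 and the new "Original Metadata" resource appearing). A minimal sketch, assuming the standard CKAN Action API is exposed under the same domain:

    # Minimal verification sketch; assumes the standard CKAN Action API is
    # reachable under the dataset's "domain" value.
    import requests

    CKAN_URL = "https://service.tib.eu/ldmservice"

    resp = requests.get(
        f"{CKAN_URL}/api/3/action/package_show",
        params={"id": "nyu-depth-v2"},
        timeout=30,
    )
    resp.raise_for_status()
    dataset = resp.json()["result"]

    print(dataset["num_resources"])  # expected: 1
    for res in dataset["resources"]:
        print(res["name"], res["format"], res["size"])  # "Original Metadata", "JSON", 1183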