Changes
On December 2, 2024 at 5:51:43 PM UTC, admin:
- Changed title to ADE20k (previously ADE20K)
- Set author of ADE20k to Shanghua Gao (previously Bolei Zhou)
- Updated description of ADE20k from
  "Semantic segmentation is a fundamental problem in computer vision. In the last years, thanks to the emergence of deep neural networks and to the availability of large-scale human-annotated datasets, the state of the art has improved significantly."
  to
  "Semantic segmentation is one of the fundamental problems in computer vision, whose task is to assign a semantic label to each pixel of an image so that different classes can be distinguished."
- Removed the following tags from ADE20k:
  - dense prediction
  - natural scene
  - Scene Parsing
  - Object Detection
  - Object Segmentation
  - Depth Estimation
  - ADE20K
  - image dataset
  - benchmark
  - Instance Segmentation
  - transformers
  - image segmentation
  - Image Matting
  - object detection
  - image classification
  - semantic image segmentation
  - urban scenes
  - affordance
  - Object Recognition
  - Panoptic Segmentation
  - scene understanding
  - depth estimation
  - general scene parsing
  - Image segmentation
  - Image Classification
  - ADE20K dataset
  - vision transformer
  - segmentation
  - Deep Learning
  - Scene Segmentation
  - scene parsing
  - dataset
  - Semantic segmentation
  - ade20k
  - densely annotated image
  - Scene Understanding
  - monocular
- Added the following tags to ADE20k:
  - ADE20k
  - Large Scale
  - image understanding
  - object parts
  - self-supervised learning
- Changed value of field defined_in to https://doi.org/10.48550/arXiv.2204.13101 in ADE20k
- Changed value of field extra_authors to [{'extra_author': 'Pan Zhou', 'orcid': ''}, {'extra_author': 'Ming-Ming Cheng', 'orcid': ''}, {'extra_author': 'Shuicheng Yan', 'orcid': ''}] in ADE20k
- Changed value of field citation to ['https://doi.org/10.48550/arXiv.2312.01987', 'https://doi.org/10.48550/arXiv.2212.02019', 'https://doi.org/10.1145/3581783.3612260', 'https://doi.org/10.48550/arXiv.1701.07122'] in ADE20k
- Deleted resource Original Metadata from ADE20k
Diff of the package metadata (previous version on "-" lines, current version on "+" lines):

 {
     "access_rights": "",
-    "author": "Bolei Zhou",
+    "author": "Shanghua Gao",
     "author_email": "",
     "citation": [
-        "https://doi.org/10.48550/arXiv.2112.01518",
-        "https://doi.org/10.48550/arXiv.2308.01127",
-        "https://doi.org/10.48550/arXiv.2204.10384",
-        "https://doi.org/10.48550/arXiv.1611.08986",
-        "https://doi.org/10.48550/arXiv.2403.01326",
-        "https://doi.org/10.48550/arXiv.2406.05850",
-        "https://doi.org/10.48550/arXiv.1709.08872",
-        "https://doi.org/10.48550/arXiv.2210.15621",
-        "https://doi.org/10.48550/arXiv.2009.05205",
-        "https://doi.org/10.48550/arXiv.2312.11872",
-        "https://doi.org/10.48550/arXiv.1909.12526",
-        "https://doi.org/10.48550/arXiv.2311.05988",
-        "https://doi.org/10.48550/arXiv.2402.16674",
-        "https://doi.org/10.1109/CVPRW50498.2020.00088",
-        "https://doi.org/10.48550/arXiv.2403.05369",
-        "https://doi.org/10.48550/arXiv.2002.00718",
-        "https://doi.org/10.48550/arXiv.2210.05844",
-        "https://doi.org/10.48550/arXiv.1611.08061",
-        "https://doi.org/10.48550/arXiv.1802.06117",
-        "https://doi.org/10.48550/arXiv.2002.07371",
-        "https://doi.org/10.48550/arXiv.1904.07642",
-        "https://doi.org/10.48550/arXiv.2210.09996",
-        "https://doi.org/10.48550/arXiv.2308.12894",
-        "https://doi.org/10.48550/arXiv.2312.14733",
-        "https://doi.org/10.48550/arXiv.2308.06739",
-        "https://doi.org/10.48550/arXiv.2307.08198",
-        "https://doi.org/10.48550/arXiv.2311.05707",
-        "https://doi.org/10.48550/arXiv.2206.07706",
-        "https://doi.org/10.48550/arXiv.2405.18240",
-        "https://doi.org/10.48550/arXiv.2208.10043",
-        "https://doi.org/10.48550/arXiv.2007.04269",
-        "https://doi.org/10.48550/arXiv.2407.18559",
-        "https://doi.org/10.48550/arXiv.2303.14608"
+        "https://doi.org/10.48550/arXiv.2312.01987",
+        "https://doi.org/10.48550/arXiv.2212.02019",
+        "https://doi.org/10.1145/3581783.3612260",
+        "https://doi.org/10.48550/arXiv.1701.07122"
     ],
     "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700",
-    "defined_in": "https://doi.org/10.48550/arXiv.2011.11675",
+    "defined_in": "https://doi.org/10.48550/arXiv.2204.13101",
     "doi": "10.57702/4nh8h4yr",
     "doi_date_published": "2024-11-25",
     "doi_publisher": "TIB",
     "doi_status": true,
     "domain": "https://service.tib.eu/ldmservice",
     "extra_authors": [
         {
-            "extra_author": "Hang Zhao",
+            "extra_author": "Pan Zhou",
             "orcid": ""
         },
         {
-            "extra_author": "Xavier Puig",
+            "extra_author": "Ming-Ming Cheng",
             "orcid": ""
         },
         {
-            "extra_author": "Sanja Fidler",
-            "orcid": ""
-        },
-        {
-            "extra_author": "Adela Barriuso",
-            "orcid": ""
-        },
-        {
-            "extra_author": "Antonio Torralba",
+            "extra_author": "Shuicheng Yan",
             "orcid": ""
         }
     ],
     "groups": [
-        {
-            "description": "",
-            "display_name": "Affordance Segmentation",
-            "id": "55ae002f-0381-45df-8ac3-60ab705f2267",
-            "image_display_url": "",
-            "name": "affordance-segmentation",
-            "title": "Affordance Segmentation"
-        },
-        {
-            "description": "",
-            "display_name": "Computer Vision",
-            "id": "d09caf7c-26c7-4e4d-bb8e-49476a90ba25",
-            "image_display_url": "",
-            "name": "computer-vision",
-            "title": "Computer Vision"
-        },
-        {
-            "description": "",
-            "display_name": "Densely Annotated Image",
-            "id": "34758656-c043-4db5-8f41-9dfac534aac9",
-            "image_display_url": "",
-            "name": "densely-annotated-image",
-            "title": "Densely Annotated Image"
-        },
-        {
-            "description": "",
-            "display_name": "Depth Estimation",
-            "id": "1cfc3f7a-9b2f-4ee9-9d15-9883618b3218",
-            "image_display_url": "",
-            "name": "depth-estimation",
-            "title": "Depth Estimation"
-        },
-        {
-            "description": "",
-            "display_name": "General Scene Parsing",
-            "id": "13c1e457-89c4-4d62-b932-007ca1a28c9a",
-            "image_display_url": "",
-            "name": "general-scene-parsing",
-            "title": "General Scene Parsing"
-        },
         {
             "description": "",
             "display_name": "Image Segmentation",
             "id": "7c8cc5f1-a9b2-4924-82ec-9e3aa3049a04",
             "image_display_url": "",
             "name": "image-segmentation",
             "title": "Image Segmentation"
         },
         {
             "description": "",
-            "display_name": "Natural Scene Understanding",
-            "id": "ee36a499-a58b-4498-bcae-3ec4b0ec9a99",
+            "display_name": "Image Understanding",
+            "id": "638a25dd-1bfb-497e-89d2-287a377aa4f6",
             "image_display_url": "",
-            "name": "natural-scene-understanding",
-            "title": "Natural Scene Understanding"
+            "name": "image-understanding",
+            "title": "Image Understanding"
         },
         {
             "description": "",
             "display_name": "Object Detection",
             "id": "ca2cb1af-d31c-49b0-a1dd-62b22f2b9e20",
             "image_display_url": "",
             "name": "object-detection",
             "title": "Object Detection"
         },
-        {
-            "description": "",
-            "display_name": "Object Segmentation",
-            "id": "da2dd0b7-d324-469d-92f8-0af74e1a1bae",
-            "image_display_url": "",
-            "name": "object-segmentation",
-            "title": "Object Segmentation"
-        },
-        {
-            "description": "",
-            "display_name": "Scene Parsing",
-            "id": "057fda39-a517-4596-b707-207f9da4c188",
-            "image_display_url": "",
-            "name": "scene-parsing",
-            "title": "Scene Parsing"
-        },
-        {
-            "description": "",
-            "display_name": "Scene Segmentation",
-            "id": "69266901-10c5-4367-b59e-e438f0dd6e34",
-            "image_display_url": "",
-            "name": "scene-segmentation",
-            "title": "Scene Segmentation"
-        },
-        {
-            "description": "",
-            "display_name": "Scene Understanding",
-            "id": "f3a9e19d-4b53-4b7d-bd4b-65c69132188a",
-            "image_display_url": "",
-            "name": "scene-understanding",
-            "title": "Scene Understanding"
-        },
         {
             "description": "",
             "display_name": "Semantic Segmentation",
             "id": "8c3f2eee-f5f9-464d-9c0a-1a5e7a925c0e",
             "image_display_url": "",
             "name": "semantic-segmentation",
             "title": "Semantic Segmentation"
         }
     ],
     "id": "9ae113e2-0320-4118-8359-5865d44cdff3",
     "isopen": false,
     "landing_page": "https://www.ade20k.com/",
     "license_title": null,
     "link_orkg": "",
     "metadata_created": "2024-11-25T14:44:48.644589",
-    "metadata_modified": "2024-12-02T17:49:01.650264",
+    "metadata_modified": "2024-12-02T17:51:42.788824",
     "name": "ade20k",
-    "notes": "Semantic segmentation is a fundamental problem in computer vision. In the last years, thanks to the emergence of deep neural networks and to the availability of large-scale human-annotated datasets, the state of the art has improved significantly.",
-    "num_resources": 1,
-    "num_tags": 44,
+    "notes": "Semantic segmentation is one of the fundamental problems in computer vision, whose task is to assign a semantic label to each pixel of an image so that different classes can be distinguished.",
+    "num_resources": 0,
+    "num_tags": 12,
     "organization": {
         "approval_status": "approved",
         "created": "2024-11-25T12:11:38.292601",
         "description": "",
         "id": "079d46db-32df-4b48-91f3-0a8bc8f69559",
         "image_url": "",
         "is_organization": true,
         "name": "no-organization",
         "state": "active",
         "title": "No Organization",
         "type": "organization"
     },
     "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559",
     "private": false,
     "relationships_as_object": [],
     "relationships_as_subject": [],
-    "resources": [
-        {
-            "cache_last_updated": null,
-            "cache_url": null,
-            "created": "2024-12-02T18:38:42",
-            "data": [
-                "dcterms:title",
-                "dcterms:accessRights",
-                "dcterms:creator",
-                "dcterms:description",
-                "dcterms:issued",
-                "dcterms:language",
-                "dcterms:identifier",
-                "dcat:theme",
-                "dcterms:type",
-                "dcat:keyword",
-                "dcat:landingPage",
-                "dcterms:hasVersion",
-                "dcterms:format",
-                "mls:task",
-                "datacite:isDescribedBy"
-            ],
-            "description": "The json representation of the dataset with its distributions based on DCAT.",
-            "format": "JSON",
-            "hash": "",
-            "id": "4c75104f-ae86-4019-8d9c-467c0c320909",
-            "last_modified": "2024-12-02T17:49:01.636593",
-            "metadata_modified": "2024-12-02T17:49:01.653349",
-            "mimetype": "application/json",
-            "mimetype_inner": null,
-            "name": "Original Metadata",
-            "package_id": "9ae113e2-0320-4118-8359-5865d44cdff3",
-            "position": 0,
-            "resource_type": null,
-            "size": 3209,
-            "state": "active",
-            "url": resource/4c75104f-ae86-4019-8d9c-467c0c320909/download/metadata.json",
-            "url_type": "upload"
-        }
-    ],
+    "resources": [],
     "services_used_list": "",
     "state": "active",
     "tags": [
         {
-            "display_name": "ADE20K",
-            "id": "0c499fde-6255-4c36-bce6-9183a1c622bf",
-            "name": "ADE20K",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "ADE20K dataset",
-            "id": "3c3593bb-990c-4642-84c9-7d4f60c33f48",
-            "name": "ADE20K dataset",
+            "display_name": "ADE20k",
+            "id": "1d739766-b0a5-4e00-acf3-69c1d2449181",
+            "name": "ADE20k",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Computer Vision",
             "id": "77b96eda-8a43-406f-9c54-d87b14f3f63e",
             "name": "Computer Vision",
             "state": "active",
             "vocabulary_id": null
         },
-        {
-            "display_name": "Deep Learning",
-            "id": "3feb7b21-e049-4dca-9372-0d438c483f6a",
-            "name": "Deep Learning",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Depth Estimation",
-            "id": "559df65c-baac-4373-9e7e-6732051e61ec",
-            "name": "Depth Estimation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Image Classification",
-            "id": "418e2ddf-a1d3-42ac-ad05-156f79ca8e22",
-            "name": "Image Classification",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Image Matting",
-            "id": "80d1252a-d83f-4e8a-a19d-3cd31ec90a9d",
-            "name": "Image Matting",
-            "state": "active",
-            "vocabulary_id": null
-        },
         {
             "display_name": "Image Segmentation",
             "id": "f5603951-aef2-4539-8066-15e72f32271b",
             "name": "Image Segmentation",
             "state": "active",
             "vocabulary_id": null
         },
         {
-            "display_name": "Image segmentation",
-            "id": "8ab34344-1929-4cd3-a55a-af732b0cf13b",
-            "name": "Image segmentation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Instance Segmentation",
-            "id": "b58d8dfe-1216-401d-8a2a-ceb09e07a013",
-            "name": "Instance Segmentation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Object Detection",
-            "id": "44adc011-570b-46cf-9a65-ab72ca690477",
-            "name": "Object Detection",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Object Recognition",
-            "id": "6a4e0b0a-637f-41ee-a647-8af2e035b203",
-            "name": "Object Recognition",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Object Segmentation",
-            "id": "64ca9c05-1123-404f-b679-7768b2ff55d2",
-            "name": "Object Segmentation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Panoptic Segmentation",
-            "id": "62316d6b-da93-440f-b9fd-8fddeba46d6d",
-            "name": "Panoptic Segmentation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Scene Parsing",
-            "id": "273c0d57-c0e5-4d78-afa3-7b733228c50e",
-            "name": "Scene Parsing",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Scene Segmentation",
-            "id": "a421d736-de87-4bf6-b0ca-865cede9173a",
-            "name": "Scene Segmentation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "Scene Understanding",
-            "id": "d066cee5-d8a2-4d28-ba93-a32fa6a30b26",
-            "name": "Scene Understanding",
+            "display_name": "Large Scale",
+            "id": "9d0a7af8-406e-4d7d-b558-ac2b45093bbf",
+            "name": "Large Scale",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Semantic Segmentation",
             "id": "809ad6af-28cd-43bd-974d-055a5c0f2973",
             "name": "Semantic Segmentation",
             "state": "active",
             "vocabulary_id": null
         },
         {
-            "display_name": "Semantic segmentation",
-            "id": "ca545ae5-2da2-4e71-98b7-3268890347d1",
-            "name": "Semantic segmentation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "ade20k",
-            "id": "a2e95889-f8d0-4aea-ba84-928e14599d95",
-            "name": "ade20k",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "affordance",
-            "id": "b6fa4863-e3f3-4f5d-8bc3-f4336741ad31",
-            "name": "affordance",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "benchmark",
-            "id": "e3d4984e-822c-4023-a134-9cacabcfc36d",
-            "name": "benchmark",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "dataset",
-            "id": "ce5ad030-ca3d-47e6-abd1-5c92a2806f1b",
-            "name": "dataset",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "dense prediction",
-            "id": "e6dac3ed-8986-4b19-a363-bae3a3ececad",
-            "name": "dense prediction",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "densely annotated image",
-            "id": "9f78680b-2af5-40da-9606-acb0a5d69e16",
-            "name": "densely annotated image",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "depth estimation",
-            "id": "3c08a798-cec3-4682-a668-4f95d6d8ad18",
-            "name": "depth estimation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "general scene parsing",
-            "id": "e8f845f5-3754-4eac-825e-27aaf9551e87",
-            "name": "general scene parsing",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "image classification",
-            "id": "34936550-ce1a-41b5-8c58-23081a6c673d",
-            "name": "image classification",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "image dataset",
-            "id": "d3acafab-ad07-46a1-88d5-540c2fd41466",
-            "name": "image dataset",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "image segmentation",
-            "id": "7eaed78e-c73a-4929-a8c9-60265069f59a",
-            "name": "image segmentation",
+            "display_name": "image understanding",
+            "id": "a209b790-086a-4345-b6af-b7b011b6e040",
+            "name": "image understanding",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "instance segmentation",
             "id": "e74e609d-6e81-4b83-b74e-fa3dd8f185f4",
             "name": "instance segmentation",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "large-scale dataset",
             "id": "a9c694bf-f591-4625-a20e-d53d3f90d489",
             "name": "large-scale dataset",
             "state": "active",
             "vocabulary_id": null
         },
         {
-            "display_name": "monocular",
-            "id": "4edc7d74-a0fd-4e7a-99cf-586781659392",
-            "name": "monocular",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "natural scene",
-            "id": "814bc820-4a10-4347-a61a-39d9f590f583",
-            "name": "natural scene",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "object detection",
-            "id": "607283c7-9e12-4167-9101-7f8078fb6537",
-            "name": "object detection",
+            "display_name": "object parts",
+            "id": "61c19f71-3eb5-4a82-8b0d-3134568a87fa",
+            "name": "object parts",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "panoptic segmentation",
             "id": "fcac67e0-cdb8-4644-b28c-4d0361593beb",
             "name": "panoptic segmentation",
             "state": "active",
             "vocabulary_id": null
         },
         {
-            "display_name": "scene parsing",
-            "id": "13b041ff-dde1-493d-9643-a1c53f9da3be",
-            "name": "scene parsing",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "scene understanding",
-            "id": "d56fbd48-e5ed-410d-8c4f-e698955e28fa",
-            "name": "scene understanding",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "segmentation",
-            "id": "7ce0e509-9f57-44c4-a015-f1ab9872bb44",
-            "name": "segmentation",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "semantic image segmentation",
-            "id": "e391afda-584a-456d-9a76-ec0437768ee7",
-            "name": "semantic image segmentation",
+            "display_name": "self-supervised learning",
+            "id": "34efc3c5-9199-4353-8b0e-a7430edfcb43",
+            "name": "self-supervised learning",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "semantic segmentation",
             "id": "f9237911-e9df-4dd5-a9aa-301b6d4969af",
             "name": "semantic segmentation",
             "state": "active",
             "vocabulary_id": null
-        },
-        {
-            "display_name": "transformers",
-            "id": "de8ae43b-acd0-4152-8c68-d20cb235cd5f",
-            "name": "transformers",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "urban scenes",
-            "id": "e4298c72-6483-4627-8b7f-743733990c13",
-            "name": "urban scenes",
-            "state": "active",
-            "vocabulary_id": null
-        },
-        {
-            "display_name": "vision transformer",
-            "id": "04ac1903-800a-43f0-a457-4b35747b689a",
-            "name": "vision transformer",
-            "state": "active",
-            "vocabulary_id": null
         }
     ],
-    "title": "ADE20K",
+    "title": "ADE20k",
     "type": "dataset",
     "version": ""
 }
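
To verify the record after these edits, the current package metadata can be retrieved programmatically. Below is a minimal sketch using CKAN's standard Action API (package_show); it assumes the catalog at https://service.tib.eu/ldmservice (the "domain" field above) exposes the default CKAN API path, and the field names it reads are the ones visible in the diff.

import json
import urllib.request

# Standard CKAN Action API endpoint; "ade20k" is the package name from the
# "name" field above. The /api/3/action path is the CKAN default and is an
# assumption about this particular instance.
URL = "https://service.tib.eu/ldmservice/api/3/action/package_show?id=ade20k"

with urllib.request.urlopen(URL) as resp:
    payload = json.load(resp)

# CKAN wraps responses as {"success": true, "result": {...}}.
if payload.get("success"):
    pkg = payload["result"]
    print(pkg["title"])                            # expected: ADE20k
    print(pkg["author"])                           # expected: Shanghua Gao
    print(sorted(t["name"] for t in pkg["tags"]))  # the 12 current tags

Comparing the printed values against the "+" lines of the diff above confirms whether this revision is still the live state of the record.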