Changes
On December 2, 2024 at 11:48:06 PM UTC, admin:
-
Changed title to Microsoft COCO: Common objects in context (previously Microsoft COCO: Common Objects in Context)
-
Set author of Microsoft COCO: Common objects in context to Pedro O. Pinheiro (previously Tsung-Yi Lin)
-
Updated description of Microsoft COCO: Common objects in context from
This paper proposes a framework to address the issue of requiring precise masks for existing FSS tasks, which address weakly-supervised few-shot segmentation tasks with only category information.
to The COCO dataset is a large-scale benchmark for object detection and segmentation.
-
Removed the following tags from Microsoft COCO: Common objects in context
- COCO dataset
- semantic segmentation
- benchmarking
- instance segmentation
- Instance Segmentation
- Computer Vision
- image segmentation
- Common Objects
- Large Scale
- attribute recognition
- Visual Fact Extraction
- Image Segmentation
- Semantic Segmentation
- Visual Question Answering
- Keypoint Detection
- image captioning
- Image Captioning
- visual recognition
- Image Classification
-
Added the following tags to Microsoft COCO: Common objects in context
-
Changed value of field
defined_in
to https://doi.org/10.48550/arXiv.2406.13316
in Microsoft COCO: Common objects in context -
Changed value of field
extra_authors
to [{'extra_author': 'Tsung-Yi Lin', 'orcid': ''}, {'extra_author': 'Ronan Collobert', 'orcid': ''}, {'extra_author': 'Piotr Doll´ar', 'orcid': ''}]
in Microsoft COCO: Common objects in context -
Changed value of field
citation
to ['https://doi.org/10.48550/arXiv.2107.11291', 'https://doi.org/10.48550/arXiv.1803.07066', 'https://doi.org/10.48550/arXiv.2108.02266', 'https://doi.org/10.48550/arXiv.1709.01476']
in Microsoft COCO: Common objects in context -
Deleted resource Original Metadata from Microsoft COCO: Common objects in context
f | 1 | { | f | 1 | { |
2 | "access_rights": "", | 2 | "access_rights": "", | ||
n | 3 | "author": "Tsung-Yi Lin", | n | 3 | "author": "Pedro O. Pinheiro", |
4 | "author_email": "", | 4 | "author_email": "", | ||
5 | "citation": [ | 5 | "citation": [ | ||
n | 6 | "https://doi.org/10.48550/arXiv.2206.04664", | n | ||
7 | "https://doi.org/10.48550/arXiv.2303.17908", | 6 | "https://doi.org/10.48550/arXiv.2107.11291", | ||
8 | "https://doi.org/10.48550/arXiv.1708.04225", | ||||
9 | "https://doi.org/10.48550/arXiv.2203.05294", | ||||
10 | "https://doi.org/10.48550/arXiv.2206.06363", | ||||
11 | "https://doi.org/10.48550/arXiv.2406.14924", | ||||
12 | "https://doi.org/10.1016/j.knosys.2024.112004", | ||||
13 | "https://doi.org/10.48550/arXiv.2305.16103", | ||||
14 | "https://doi.org/10.48550/arXiv.2305.17718", | ||||
15 | "https://doi.org/10.48550/arXiv.1604.00466", | 7 | "https://doi.org/10.48550/arXiv.1803.07066", | ||
16 | "https://doi.org/10.48550/arXiv.1812.10889", | ||||
17 | "https://doi.org/10.48550/arXiv.2212.00280" | 8 | "https://doi.org/10.48550/arXiv.2108.02266", | ||
9 | "https://doi.org/10.48550/arXiv.1709.01476" | ||||
18 | ], | 10 | ], | ||
19 | "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700", | 11 | "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700", | ||
n | 20 | "defined_in": "https://doi.org/10.48550/arXiv.2301.08898", | n | 12 | "defined_in": "https://doi.org/10.48550/arXiv.2406.13316", |
21 | "doi": "10.57702/lydv1ylk", | 13 | "doi": "10.57702/lydv1ylk", | ||
22 | "doi_date_published": "2024-12-02", | 14 | "doi_date_published": "2024-12-02", | ||
23 | "doi_publisher": "TIB", | 15 | "doi_publisher": "TIB", | ||
24 | "doi_status": true, | 16 | "doi_status": true, | ||
25 | "domain": "https://service.tib.eu/ldmservice", | 17 | "domain": "https://service.tib.eu/ldmservice", | ||
26 | "extra_authors": [ | 18 | "extra_authors": [ | ||
27 | { | 19 | { | ||
n | 28 | "extra_author": "Michael Maire", | n | 20 | "extra_author": "Tsung-Yi Lin", |
29 | "orcid": "" | 21 | "orcid": "" | ||
30 | }, | 22 | }, | ||
31 | { | 23 | { | ||
n | 32 | "extra_author": "Serge Belongie", | n | 24 | "extra_author": "Ronan Collobert", |
33 | "orcid": "" | 25 | "orcid": "" | ||
34 | }, | 26 | }, | ||
35 | { | 27 | { | ||
n | 36 | "extra_author": "James Hays", | n | ||
37 | "orcid": "" | ||||
38 | }, | ||||
39 | { | ||||
40 | "extra_author": "Pietro Perona", | ||||
41 | "orcid": "" | ||||
42 | }, | ||||
43 | { | ||||
44 | "extra_author": "Deva Ramanan", | ||||
45 | "orcid": "" | ||||
46 | }, | ||||
47 | { | ||||
48 | "extra_author": "Piotr Doll\u00e1r", | 28 | "extra_author": "Piotr Doll\u00b4ar", | ||
49 | "orcid": "" | ||||
50 | }, | ||||
51 | { | ||||
52 | "extra_author": "C Lawrence Zitnick", | ||||
53 | "orcid": "" | 29 | "orcid": "" | ||
54 | } | 30 | } | ||
55 | ], | 31 | ], | ||
56 | "groups": [ | 32 | "groups": [ | ||
n | 57 | { | n | ||
58 | "description": "", | ||||
59 | "display_name": "Benchmarking", | ||||
60 | "id": "87ddb657-b4dd-43ab-91dd-3ea19d62c2c7", | ||||
61 | "image_display_url": "", | ||||
62 | "name": "benchmarking", | ||||
63 | "title": "Benchmarking" | ||||
64 | }, | ||||
65 | { | ||||
66 | "description": "", | ||||
67 | "display_name": "Image Captioning", | ||||
68 | "id": "7a76ce67-2607-4da9-a837-d2017dc33ec6", | ||||
69 | "image_display_url": "", | ||||
70 | "name": "image-captioning", | ||||
71 | "title": "Image Captioning" | ||||
72 | }, | ||||
73 | { | 33 | { | ||
74 | "description": "", | 34 | "description": "", | ||
75 | "display_name": "Image Classification", | 35 | "display_name": "Image Classification", | ||
76 | "id": "18b77292-26aa-4caf-89ed-cbd35fa60474", | 36 | "id": "18b77292-26aa-4caf-89ed-cbd35fa60474", | ||
77 | "image_display_url": "", | 37 | "image_display_url": "", | ||
78 | "name": "image-classification", | 38 | "name": "image-classification", | ||
79 | "title": "Image Classification" | 39 | "title": "Image Classification" | ||
80 | }, | 40 | }, | ||
81 | { | 41 | { | ||
82 | "description": "", | 42 | "description": "", | ||
83 | "display_name": "Image Segmentation", | 43 | "display_name": "Image Segmentation", | ||
84 | "id": "7c8cc5f1-a9b2-4924-82ec-9e3aa3049a04", | 44 | "id": "7c8cc5f1-a9b2-4924-82ec-9e3aa3049a04", | ||
85 | "image_display_url": "", | 45 | "image_display_url": "", | ||
86 | "name": "image-segmentation", | 46 | "name": "image-segmentation", | ||
87 | "title": "Image Segmentation" | 47 | "title": "Image Segmentation" | ||
88 | }, | 48 | }, | ||
89 | { | 49 | { | ||
90 | "description": "", | 50 | "description": "", | ||
n | 91 | "display_name": "Instance Segmentation", | n | ||
92 | "id": "f856527a-3d35-4c73-8c09-bf3f4a3bbb9f", | ||||
93 | "image_display_url": "", | ||||
94 | "name": "instance-segmentation", | ||||
95 | "title": "Instance Segmentation" | ||||
96 | }, | ||||
97 | { | ||||
98 | "description": "", | ||||
99 | "display_name": "Keypoint Detection", | ||||
100 | "id": "67f02444-b264-4177-999f-2fd8d858d8a4", | ||||
101 | "image_display_url": "", | ||||
102 | "name": "keypoint-detection", | ||||
103 | "title": "Keypoint Detection" | ||||
104 | }, | ||||
105 | { | ||||
106 | "description": "", | ||||
107 | "display_name": "Object Detection", | 51 | "display_name": "Object Detection", | ||
108 | "id": "ca2cb1af-d31c-49b0-a1dd-62b22f2b9e20", | 52 | "id": "ca2cb1af-d31c-49b0-a1dd-62b22f2b9e20", | ||
109 | "image_display_url": "", | 53 | "image_display_url": "", | ||
110 | "name": "object-detection", | 54 | "name": "object-detection", | ||
111 | "title": "Object Detection" | 55 | "title": "Object Detection" | ||
112 | }, | 56 | }, | ||
113 | { | 57 | { | ||
114 | "description": "", | 58 | "description": "", | ||
n | 115 | "display_name": "Semantic Segmentation", | n | 59 | "display_name": "Object Segmentation", |
116 | "id": "8c3f2eee-f5f9-464d-9c0a-1a5e7a925c0e", | 60 | "id": "da2dd0b7-d324-469d-92f8-0af74e1a1bae", | ||
117 | "image_display_url": "", | 61 | "image_display_url": "", | ||
n | 118 | "name": "semantic-segmentation", | n | 62 | "name": "object-segmentation", |
119 | "title": "Semantic Segmentation" | 63 | "title": "Object Segmentation" | ||
120 | }, | ||||
121 | { | ||||
122 | "description": "", | ||||
123 | "display_name": "Visual Fact Extraction", | ||||
124 | "id": "de2e3d81-f988-4e9c-b317-14120c9b50e0", | ||||
125 | "image_display_url": "", | ||||
126 | "name": "visual-fact-extraction", | ||||
127 | "title": "Visual Fact Extraction" | ||||
128 | }, | ||||
129 | { | ||||
130 | "description": "", | ||||
131 | "display_name": "Visual Question Answering", | ||||
132 | "id": "e15f82e8-481e-43a8-8a39-d6e0751ace6c", | ||||
133 | "image_display_url": "", | ||||
134 | "name": "visual-question-answering", | ||||
135 | "title": "Visual Question Answering" | ||||
136 | }, | ||||
137 | { | ||||
138 | "description": "", | ||||
139 | "display_name": "Visual Recognition", | ||||
140 | "id": "10de84f4-1539-442d-bfc5-ccee01875f41", | ||||
141 | "image_display_url": "", | ||||
142 | "name": "visual-recognition", | ||||
143 | "title": "Visual Recognition" | ||||
144 | } | 64 | } | ||
145 | ], | 65 | ], | ||
146 | "id": "e6bc4c67-2b8f-4884-b1ea-a3c455223145", | 66 | "id": "e6bc4c67-2b8f-4884-b1ea-a3c455223145", | ||
147 | "isopen": false, | 67 | "isopen": false, | ||
148 | "landing_page": "https://cocodataset.org/", | 68 | "landing_page": "https://cocodataset.org/", | ||
149 | "license_title": null, | 69 | "license_title": null, | ||
150 | "link_orkg": "", | 70 | "link_orkg": "", | ||
151 | "metadata_created": "2024-12-02T18:05:17.466586", | 71 | "metadata_created": "2024-12-02T18:05:17.466586", | ||
n | 152 | "metadata_modified": "2024-12-02T18:05:17.867314", | n | 72 | "metadata_modified": "2024-12-02T23:48:05.150984", |
153 | "name": "microsoft-coco--common-objects-in-context", | 73 | "name": "microsoft-coco--common-objects-in-context", | ||
n | 154 | "notes": "This paper proposes a framework to address the issue of | n | 74 | "notes": "The COCO dataset is a large-scale benchmark for object |
155 | requiring precise masks for existing FSS tasks, which address | 75 | detection and segmentation.", | ||
156 | weakly-supervised few-shot segmentation tasks with only category | ||||
157 | information.", | ||||
158 | "num_resources": 1, | 76 | "num_resources": 0, | ||
159 | "num_tags": 25, | 77 | "num_tags": 13, | ||
160 | "organization": { | 78 | "organization": { | ||
161 | "approval_status": "approved", | 79 | "approval_status": "approved", | ||
162 | "created": "2024-11-25T12:11:38.292601", | 80 | "created": "2024-11-25T12:11:38.292601", | ||
163 | "description": "", | 81 | "description": "", | ||
164 | "id": "079d46db-32df-4b48-91f3-0a8bc8f69559", | 82 | "id": "079d46db-32df-4b48-91f3-0a8bc8f69559", | ||
165 | "image_url": "", | 83 | "image_url": "", | ||
166 | "is_organization": true, | 84 | "is_organization": true, | ||
167 | "name": "no-organization", | 85 | "name": "no-organization", | ||
168 | "state": "active", | 86 | "state": "active", | ||
169 | "title": "No Organization", | 87 | "title": "No Organization", | ||
170 | "type": "organization" | 88 | "type": "organization" | ||
171 | }, | 89 | }, | ||
172 | "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559", | 90 | "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559", | ||
173 | "private": false, | 91 | "private": false, | ||
174 | "relationships_as_object": [], | 92 | "relationships_as_object": [], | ||
175 | "relationships_as_subject": [], | 93 | "relationships_as_subject": [], | ||
n | 176 | "resources": [ | n | 94 | "resources": [], |
177 | { | ||||
178 | "cache_last_updated": null, | ||||
179 | "cache_url": null, | ||||
180 | "created": "2024-12-02T18:38:42", | ||||
181 | "data": [ | ||||
182 | "dcterms:title", | ||||
183 | "dcterms:accessRights", | ||||
184 | "dcterms:creator", | ||||
185 | "dcterms:description", | ||||
186 | "dcterms:issued", | ||||
187 | "dcterms:language", | ||||
188 | "dcterms:identifier", | ||||
189 | "dcat:theme", | ||||
190 | "dcterms:type", | ||||
191 | "dcat:keyword", | ||||
192 | "dcat:landingPage", | ||||
193 | "dcterms:hasVersion", | ||||
194 | "dcterms:format", | ||||
195 | "mls:task", | ||||
196 | "datacite:isDescribedBy" | ||||
197 | ], | ||||
198 | "description": "The json representation of the dataset with its | ||||
199 | distributions based on DCAT.", | ||||
200 | "format": "JSON", | ||||
201 | "hash": "", | ||||
202 | "id": "37f8d113-ae38-4465-b59d-a02d1f4f3470", | ||||
203 | "last_modified": "2024-12-02T18:05:17.856404", | ||||
204 | "metadata_modified": "2024-12-02T18:05:17.870042", | ||||
205 | "mimetype": "application/json", | ||||
206 | "mimetype_inner": null, | ||||
207 | "name": "Original Metadata", | ||||
208 | "package_id": "e6bc4c67-2b8f-4884-b1ea-a3c455223145", | ||||
209 | "position": 0, | ||||
210 | "resource_type": null, | ||||
211 | "size": 2163, | ||||
212 | "state": "active", | ||||
213 | "url": | ||||
214 | resource/37f8d113-ae38-4465-b59d-a02d1f4f3470/download/metadata.json", | ||||
215 | "url_type": "upload" | ||||
216 | } | ||||
217 | ], | ||||
218 | "services_used_list": "", | 95 | "services_used_list": "", | ||
219 | "state": "active", | 96 | "state": "active", | ||
220 | "tags": [ | 97 | "tags": [ | ||
221 | { | 98 | { | ||
222 | "display_name": "COCO", | 99 | "display_name": "COCO", | ||
223 | "id": "892a6596-c332-4778-b0bb-a1d1046c3cb8", | 100 | "id": "892a6596-c332-4778-b0bb-a1d1046c3cb8", | ||
224 | "name": "COCO", | 101 | "name": "COCO", | ||
225 | "state": "active", | 102 | "state": "active", | ||
226 | "vocabulary_id": null | 103 | "vocabulary_id": null | ||
227 | }, | 104 | }, | ||
228 | { | 105 | { | ||
n | 229 | "display_name": "COCO dataset", | n | 106 | "display_name": "COCO Dataset", |
230 | "id": "d3e1175b-c887-4a04-a77b-8ebe58dd7bd6", | 107 | "id": "501caec4-572d-4f1a-9cc1-1eb3363a8a53", | ||
231 | "name": "COCO dataset", | 108 | "name": "COCO Dataset", | ||
232 | "state": "active", | 109 | "state": "active", | ||
233 | "vocabulary_id": null | 110 | "vocabulary_id": null | ||
234 | }, | 111 | }, | ||
235 | { | 112 | { | ||
n | 236 | "display_name": "Common Objects", | n | 113 | "display_name": "Common Objects in Context", |
237 | "id": "1dd1d2d3-a447-4c8e-a75b-518b89caa4d1", | 114 | "id": "a4750412-fbae-40d2-b964-eb66e6334896", | ||
238 | "name": "Common Objects", | 115 | "name": "Common Objects in Context", | ||
239 | "state": "active", | 116 | "state": "active", | ||
240 | "vocabulary_id": null | 117 | "vocabulary_id": null | ||
241 | }, | 118 | }, | ||
242 | { | 119 | { | ||
n | 243 | "display_name": "Computer Vision", | n | ||
244 | "id": "77b96eda-8a43-406f-9c54-d87b14f3f63e", | ||||
245 | "name": "Computer Vision", | ||||
246 | "state": "active", | ||||
247 | "vocabulary_id": null | ||||
248 | }, | ||||
249 | { | ||||
250 | "display_name": "Image Captioning", | ||||
251 | "id": "c708cba4-0a1f-45c9-826f-898857783343", | ||||
252 | "name": "Image Captioning", | ||||
253 | "state": "active", | ||||
254 | "vocabulary_id": null | ||||
255 | }, | ||||
256 | { | ||||
257 | "display_name": "Image Classification", | ||||
258 | "id": "418e2ddf-a1d3-42ac-ad05-156f79ca8e22", | ||||
259 | "name": "Image Classification", | ||||
260 | "state": "active", | ||||
261 | "vocabulary_id": null | ||||
262 | }, | ||||
263 | { | ||||
264 | "display_name": "Image Segmentation", | 120 | "display_name": "Image segmentation", | ||
265 | "id": "f5603951-aef2-4539-8066-15e72f32271b", | 121 | "id": "8ab34344-1929-4cd3-a55a-af732b0cf13b", | ||
266 | "name": "Image Segmentation", | 122 | "name": "Image segmentation", | ||
267 | "state": "active", | ||||
268 | "vocabulary_id": null | ||||
269 | }, | ||||
270 | { | ||||
271 | "display_name": "Instance Segmentation", | ||||
272 | "id": "b58d8dfe-1216-401d-8a2a-ceb09e07a013", | ||||
273 | "name": "Instance Segmentation", | ||||
274 | "state": "active", | ||||
275 | "vocabulary_id": null | ||||
276 | }, | ||||
277 | { | ||||
278 | "display_name": "Keypoint Detection", | ||||
279 | "id": "aa2c073b-d3df-4804-96e5-e732af5ecf8f", | ||||
280 | "name": "Keypoint Detection", | ||||
281 | "state": "active", | ||||
282 | "vocabulary_id": null | ||||
283 | }, | ||||
284 | { | ||||
285 | "display_name": "Large Scale", | ||||
286 | "id": "9d0a7af8-406e-4d7d-b558-ac2b45093bbf", | ||||
287 | "name": "Large Scale", | ||||
288 | "state": "active", | 123 | "state": "active", | ||
289 | "vocabulary_id": null | 124 | "vocabulary_id": null | ||
290 | }, | 125 | }, | ||
291 | { | 126 | { | ||
292 | "display_name": "Object Detection", | 127 | "display_name": "Object Detection", | ||
293 | "id": "44adc011-570b-46cf-9a65-ab72ca690477", | 128 | "id": "44adc011-570b-46cf-9a65-ab72ca690477", | ||
294 | "name": "Object Detection", | 129 | "name": "Object Detection", | ||
295 | "state": "active", | 130 | "state": "active", | ||
296 | "vocabulary_id": null | 131 | "vocabulary_id": null | ||
297 | }, | 132 | }, | ||
298 | { | 133 | { | ||
n | 299 | "display_name": "Semantic Segmentation", | n | 134 | "display_name": "Object Segmentation", |
300 | "id": "809ad6af-28cd-43bd-974d-055a5c0f2973", | 135 | "id": "64ca9c05-1123-404f-b679-7768b2ff55d2", | ||
301 | "name": "Semantic Segmentation", | 136 | "name": "Object Segmentation", | ||
302 | "state": "active", | 137 | "state": "active", | ||
303 | "vocabulary_id": null | 138 | "vocabulary_id": null | ||
304 | }, | 139 | }, | ||
305 | { | 140 | { | ||
n | 306 | "display_name": "Visual Fact Extraction", | n | 141 | "display_name": "Object detection", |
307 | "id": "40b261ac-d57a-4a84-8608-00011352dc53", | 142 | "id": "84a57b7d-e522-4fc2-9f65-9aeb121659f1", | ||
308 | "name": "Visual Fact Extraction", | 143 | "name": "Object detection", | ||
309 | "state": "active", | ||||
310 | "vocabulary_id": null | ||||
311 | }, | ||||
312 | { | ||||
313 | "display_name": "Visual Question Answering", | ||||
314 | "id": "d8aee55f-7e04-411f-8ce5-0794bc0406b0", | ||||
315 | "name": "Visual Question Answering", | ||||
316 | "state": "active", | ||||
317 | "vocabulary_id": null | ||||
318 | }, | ||||
319 | { | ||||
320 | "display_name": "attribute recognition", | ||||
321 | "id": "b4a34c8e-b5f2-45cf-9258-43adb3f3ce57", | ||||
322 | "name": "attribute recognition", | ||||
323 | "state": "active", | ||||
324 | "vocabulary_id": null | ||||
325 | }, | ||||
326 | { | ||||
327 | "display_name": "benchmarking", | ||||
328 | "id": "d9f3d223-10a3-41fb-9231-bed79d8943c0", | ||||
329 | "name": "benchmarking", | ||||
330 | "state": "active", | 144 | "state": "active", | ||
331 | "vocabulary_id": null | 145 | "vocabulary_id": null | ||
332 | }, | 146 | }, | ||
333 | { | 147 | { | ||
334 | "display_name": "coco", | 148 | "display_name": "coco", | ||
335 | "id": "7eb81be7-b293-4739-8f12-cd4d61804e64", | 149 | "id": "7eb81be7-b293-4739-8f12-cd4d61804e64", | ||
336 | "name": "coco", | 150 | "name": "coco", | ||
337 | "state": "active", | 151 | "state": "active", | ||
338 | "vocabulary_id": null | 152 | "vocabulary_id": null | ||
339 | }, | 153 | }, | ||
340 | { | 154 | { | ||
n | 341 | "display_name": "image annotation", | n | 155 | "display_name": "common objects", |
342 | "id": "6734bcb9-ad78-4289-8816-09b71832ab8e", | 156 | "id": "15ad5a07-90b4-4c32-b9e2-65a5ee1b86da", | ||
343 | "name": "image annotation", | 157 | "name": "common objects", | ||
344 | "state": "active", | 158 | "state": "active", | ||
345 | "vocabulary_id": null | 159 | "vocabulary_id": null | ||
346 | }, | 160 | }, | ||
347 | { | 161 | { | ||
n | n | 162 | "display_name": "context", | ||
163 | "id": "201a528e-8e79-4e2f-bdd4-08d8be4d1a14", | ||||
164 | "name": "context", | ||||
165 | "state": "active", | ||||
166 | "vocabulary_id": null | ||||
167 | }, | ||||
168 | { | ||||
348 | "display_name": "image captioning", | 169 | "display_name": "image annotation", | ||
349 | "id": "f1bbe827-a03a-4280-b9fa-0599ccfc0541", | 170 | "id": "6734bcb9-ad78-4289-8816-09b71832ab8e", | ||
350 | "name": "image captioning", | 171 | "name": "image annotation", | ||
351 | "state": "active", | 172 | "state": "active", | ||
352 | "vocabulary_id": null | 173 | "vocabulary_id": null | ||
353 | }, | 174 | }, | ||
354 | { | 175 | { | ||
355 | "display_name": "image classification", | 176 | "display_name": "image classification", | ||
356 | "id": "34936550-ce1a-41b5-8c58-23081a6c673d", | 177 | "id": "34936550-ce1a-41b5-8c58-23081a6c673d", | ||
357 | "name": "image classification", | 178 | "name": "image classification", | ||
358 | "state": "active", | 179 | "state": "active", | ||
359 | "vocabulary_id": null | 180 | "vocabulary_id": null | ||
360 | }, | 181 | }, | ||
361 | { | 182 | { | ||
n | 362 | "display_name": "image segmentation", | n | ||
363 | "id": "7eaed78e-c73a-4929-a8c9-60265069f59a", | ||||
364 | "name": "image segmentation", | ||||
365 | "state": "active", | ||||
366 | "vocabulary_id": null | ||||
367 | }, | ||||
368 | { | ||||
369 | "display_name": "instance segmentation", | ||||
370 | "id": "e74e609d-6e81-4b83-b74e-fa3dd8f185f4", | ||||
371 | "name": "instance segmentation", | ||||
372 | "state": "active", | ||||
373 | "vocabulary_id": null | ||||
374 | }, | ||||
375 | { | ||||
376 | "display_name": "object detection", | 183 | "display_name": "object detection", | ||
377 | "id": "607283c7-9e12-4167-9101-7f8078fb6537", | 184 | "id": "607283c7-9e12-4167-9101-7f8078fb6537", | ||
378 | "name": "object detection", | 185 | "name": "object detection", | ||
379 | "state": "active", | 186 | "state": "active", | ||
380 | "vocabulary_id": null | 187 | "vocabulary_id": null | ||
n | 381 | }, | n | ||
382 | { | ||||
383 | "display_name": "semantic segmentation", | ||||
384 | "id": "f9237911-e9df-4dd5-a9aa-301b6d4969af", | ||||
385 | "name": "semantic segmentation", | ||||
386 | "state": "active", | ||||
387 | "vocabulary_id": null | ||||
388 | }, | ||||
389 | { | ||||
390 | "display_name": "visual recognition", | ||||
391 | "id": "48146459-dac0-435d-93fb-a2a932ab2601", | ||||
392 | "name": "visual recognition", | ||||
393 | "state": "active", | ||||
394 | "vocabulary_id": null | ||||
395 | } | 188 | } | ||
396 | ], | 189 | ], | ||
t | 397 | "title": "Microsoft COCO: Common Objects in Context", | t | 190 | "title": "Microsoft COCO: Common objects in context", |
398 | "type": "dataset", | 191 | "type": "dataset", | ||
399 | "version": "" | 192 | "version": "" | ||
400 | } | 193 | } |