Changes
On December 3, 2024 at 10:29:36 AM UTC, admin:
-
Changed title to Deepfashion (previously DeepFashion)
-
Set author of Deepfashion to Shi Qiu (previously Jinkuan Zhu)
-
Updated description of Deepfashion from
Fashion image retrieval task aims to search relevant clothing items of a query image from the gallery. The previous recipes focus on designing different distance-based loss functions, pulling relevant pairs to be close and pushing irrelevant images apart. However, these methods ignore fine-grained features (e.g. neckband, cuff) of clothing images.
to Deepfashion: Powering robust clothes recognition and retrieval with rich annotations.
-
Removed the following tags from Deepfashion
- attribute-decomposed GAN
- Fashion Attribute
- Image Dataset
- Monocular Image
- DeepFashion
- clothing images
- controllable image synthesis
- fashion
- Clothes Retrieval
- image synthesis
- fashion image retrieval
- person images
- pose guided image synthesis
- person image synthesis
- fashion benchmark
- rich annotations
- Clothing Details
- localization dataset
- fashion item generation
- fine-grained attributes
- Attribute Classification
- fine-grained features
- clothing image analysis
- deep learning
- Clothing Recognition
- Animation-ready
- arbitrary poses
- landmarks
- 2D Reconstruction
- clothing attributes
- Clothed Humans
- segmentation masks
- clothing image
-
Added tag Clothes Recognition to Deepfashion
-
Changed value of field
defined_in
to https://doi.org/10.48550/arXiv.1906.00884
in Deepfashion -
Changed value of field
extra_authors
to [{'extra_author': 'Xiaogang Wang', 'orcid': ''}, {'extra_author': 'Ziwei Liu', 'orcid': ''}, {'extra_author': 'Ping Luo', 'orcid': ''}, {'extra_author': 'Xiaoou Tang', 'orcid': ''}]
in Deepfashion -
Changed value of field
citation
to []
in Deepfashion -
Changed value of field
landing_page
to https://www.microsoft.com/en-us/research/publications/deepfashion-powering-robust-clothes-recognition-and-retrieval-with-rich-annotations/
in Deepfashion -
Deleted resource Original Metadata from Deepfashion
f | 1 | { | f | 1 | { |
2 | "access_rights": "", | 2 | "access_rights": "", | ||
n | 3 | "author": "Jinkuan Zhu", | n | 3 | "author": "Shi Qiu", |
4 | "author_email": "", | 4 | "author_email": "", | ||
n | 5 | "citation": [ | n | 5 | "citation": [], |
6 | "https://doi.org/10.48550/arXiv.1810.11610", | ||||
7 | "https://doi.org/10.48550/arXiv.2003.00696", | ||||
8 | "https://doi.org/10.48550/arXiv.1910.08292", | ||||
9 | "https://doi.org/10.48550/arXiv.2004.04572", | ||||
10 | "https://doi.org/10.48550/arXiv.1909.13819", | ||||
11 | "https://doi.org/10.48550/arXiv.1905.12794", | ||||
12 | "https://doi.org/10.48550/arXiv.2302.08902", | ||||
13 | "https://doi.org/10.48550/arXiv.2405.15244", | ||||
14 | "https://doi.org/10.48550/arXiv.2005.00419" | ||||
15 | ], | ||||
16 | "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700", | 6 | "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700", | ||
n | 17 | "defined_in": "https://doi.org/10.48550/arXiv.2003.12267", | n | 7 | "defined_in": "https://doi.org/10.48550/arXiv.1906.00884", |
18 | "doi": "10.57702/0kbcvyos", | 8 | "doi": "10.57702/0kbcvyos", | ||
19 | "doi_date_published": "2024-12-02", | 9 | "doi_date_published": "2024-12-02", | ||
20 | "doi_publisher": "TIB", | 10 | "doi_publisher": "TIB", | ||
21 | "doi_status": true, | 11 | "doi_status": true, | ||
22 | "domain": "https://service.tib.eu/ldmservice", | 12 | "domain": "https://service.tib.eu/ldmservice", | ||
23 | "extra_authors": [ | 13 | "extra_authors": [ | ||
24 | { | 14 | { | ||
n | 25 | "extra_author": "Hao Huang", | n | 15 | "extra_author": "Xiaogang Wang", |
26 | "orcid": "" | 16 | "orcid": "" | ||
27 | }, | 17 | }, | ||
28 | { | 18 | { | ||
n | 29 | "extra_author": "Qiao Deng", | n | 19 | "extra_author": "Ziwei Liu", |
30 | "orcid": "" | 20 | "orcid": "" | ||
31 | }, | 21 | }, | ||
32 | { | 22 | { | ||
n | n | 23 | "extra_author": "Ping Luo", | ||
24 | "orcid": "" | ||||
25 | }, | ||||
26 | { | ||||
33 | "extra_author": "Xiyao Li", | 27 | "extra_author": "Xiaoou Tang", | ||
34 | "orcid": "" | 28 | "orcid": "" | ||
35 | } | 29 | } | ||
36 | ], | 30 | ], | ||
37 | "groups": [ | 31 | "groups": [ | ||
38 | { | 32 | { | ||
39 | "description": "", | 33 | "description": "", | ||
n | 40 | "display_name": "2D Human Reconstruction", | n | 34 | "display_name": "Clothes Recognition", |
41 | "id": "70de29d5-33f9-474b-a7cf-743c9d1da21a", | 35 | "id": "cf8e008a-6dce-40a3-b0fe-51b1ee68ef0f", | ||
42 | "image_display_url": "", | 36 | "image_display_url": "", | ||
n | 43 | "name": "2d-human-reconstruction", | n | ||
44 | "title": "2D Human Reconstruction" | ||||
45 | }, | ||||
46 | { | ||||
47 | "description": "", | ||||
48 | "display_name": "Clothed Humans", | ||||
49 | "id": "c70c7286-c945-4d20-bd3e-7fe2926b4b11", | ||||
50 | "image_display_url": "", | ||||
51 | "name": "clothed-humans", | ||||
52 | "title": "Clothed Humans" | ||||
53 | }, | ||||
54 | { | ||||
55 | "description": "", | ||||
56 | "display_name": "Clothes Retrieval", | ||||
57 | "id": "00192013-ea99-4441-96fa-4ac8aabbcded", | ||||
58 | "image_display_url": "", | ||||
59 | "name": "clothes-retrieval", | ||||
60 | "title": "Clothes Retrieval" | ||||
61 | }, | ||||
62 | { | ||||
63 | "description": "", | ||||
64 | "display_name": "Clothing Attribute Prediction", | ||||
65 | "id": "0d68ed68-6fcf-4117-a3e2-c7b5ea51ee50", | ||||
66 | "image_display_url": "", | ||||
67 | "name": "clothing-attribute-prediction", | ||||
68 | "title": "Clothing Attribute Prediction" | ||||
69 | }, | ||||
70 | { | ||||
71 | "description": "", | ||||
72 | "display_name": "Clothing Image Analysis", | ||||
73 | "id": "e86f47dd-6152-4370-a5df-6a7d053ba368", | ||||
74 | "image_display_url": "", | ||||
75 | "name": "clothing-image-analysis", | ||||
76 | "title": "Clothing Image Analysis" | ||||
77 | }, | ||||
78 | { | ||||
79 | "description": "", | ||||
80 | "display_name": "Clothing Recognition", | ||||
81 | "id": "4738dcdc-4993-4932-92ba-a0faba36fccb", | ||||
82 | "image_display_url": "", | ||||
83 | "name": "clothing-recognition", | 37 | "name": "clothes-recognition", | ||
84 | "title": "Clothing Recognition" | 38 | "title": "Clothes Recognition" | ||
85 | }, | ||||
86 | { | ||||
87 | "description": "", | ||||
88 | "display_name": "Fashion", | ||||
89 | "id": "cfe360ba-f0f6-4eb6-abb1-6b98dc8e179f", | ||||
90 | "image_display_url": "", | ||||
91 | "name": "fashion", | ||||
92 | "title": "Fashion" | ||||
93 | }, | ||||
94 | { | ||||
95 | "description": "", | ||||
96 | "display_name": "Fashion Attribute", | ||||
97 | "id": "9a4b12de-10c1-4a8c-b466-ef64ed16ba09", | ||||
98 | "image_display_url": "", | ||||
99 | "name": "fashion-attribute", | ||||
100 | "title": "Fashion Attribute" | ||||
101 | }, | ||||
102 | { | ||||
103 | "description": "", | ||||
104 | "display_name": "Fashion Attributes Detection", | ||||
105 | "id": "c4c078a4-b604-4444-a063-dc9a4816d8d6", | ||||
106 | "image_display_url": "", | ||||
107 | "name": "fashion-attributes-detection", | ||||
108 | "title": "Fashion Attributes Detection" | ||||
109 | }, | ||||
110 | { | ||||
111 | "description": "", | ||||
112 | "display_name": "Fashion Image Retrieval", | ||||
113 | "id": "00e971c1-1ba2-495d-87ab-83c2384a6682", | ||||
114 | "image_display_url": "", | ||||
115 | "name": "fashion-image-retrieval", | ||||
116 | "title": "Fashion Image Retrieval" | ||||
117 | }, | ||||
118 | { | ||||
119 | "description": "", | ||||
120 | "display_name": "Fashion Image Understanding", | ||||
121 | "id": "867d887a-b63f-4c1d-8525-d37c0e228564", | ||||
122 | "image_display_url": "", | ||||
123 | "name": "fashion-image-understanding", | ||||
124 | "title": "Fashion Image Understanding" | ||||
125 | }, | ||||
126 | { | ||||
127 | "description": "", | ||||
128 | "display_name": "Fashion Item Generation", | ||||
129 | "id": "16053d71-880e-4f61-af44-ac65c26b9587", | ||||
130 | "image_display_url": "", | ||||
131 | "name": "fashion-item-generation", | ||||
132 | "title": "Fashion Item Generation" | ||||
133 | }, | ||||
134 | { | ||||
135 | "description": "", | ||||
136 | "display_name": "Image Dataset", | ||||
137 | "id": "fc745cca-b21e-4ced-ba81-06a456938edf", | ||||
138 | "image_display_url": "", | ||||
139 | "name": "image-dataset", | ||||
140 | "title": "Image Dataset" | ||||
141 | }, | 39 | }, | ||
142 | { | 40 | { | ||
143 | "description": "", | 41 | "description": "", | ||
144 | "display_name": "Image Retrieval", | 42 | "display_name": "Image Retrieval", | ||
145 | "id": "e9e05338-d06d-4467-abf9-ba0f67f4e096", | 43 | "id": "e9e05338-d06d-4467-abf9-ba0f67f4e096", | ||
146 | "image_display_url": "", | 44 | "image_display_url": "", | ||
147 | "name": "image-retrieval", | 45 | "name": "image-retrieval", | ||
148 | "title": "Image Retrieval" | 46 | "title": "Image Retrieval" | ||
n | 149 | }, | n | ||
150 | { | ||||
151 | "description": "", | ||||
152 | "display_name": "Image Synthesis", | ||||
153 | "id": "8b89ca6b-96f7-439b-8045-febd38230620", | ||||
154 | "image_display_url": "", | ||||
155 | "name": "image-synthesis", | ||||
156 | "title": "Image Synthesis" | ||||
157 | }, | ||||
158 | { | ||||
159 | "description": "", | ||||
160 | "display_name": "Monocular Image", | ||||
161 | "id": "089b7614-976e-4a12-aecf-098111ad32db", | ||||
162 | "image_display_url": "", | ||||
163 | "name": "monocular-image", | ||||
164 | "title": "Monocular Image" | ||||
165 | }, | ||||
166 | { | ||||
167 | "description": "", | ||||
168 | "display_name": "Person Image Synthesis", | ||||
169 | "id": "e25aac29-3896-44c6-89f0-a245cc6b2d12", | ||||
170 | "image_display_url": "", | ||||
171 | "name": "person-image-synthesis", | ||||
172 | "title": "Person Image Synthesis" | ||||
173 | }, | ||||
174 | { | ||||
175 | "description": "", | ||||
176 | "display_name": "Pose Guided Image Synthesis", | ||||
177 | "id": "73d75cfd-b273-475d-8f52-6306c717d7b4", | ||||
178 | "image_display_url": "", | ||||
179 | "name": "pose-guided-image-synthesis", | ||||
180 | "title": "Pose Guided Image Synthesis" | ||||
181 | } | 47 | } | ||
182 | ], | 48 | ], | ||
183 | "id": "e9e7aee4-374f-4748-b8e2-059fba75a70c", | 49 | "id": "e9e7aee4-374f-4748-b8e2-059fba75a70c", | ||
184 | "isopen": false, | 50 | "isopen": false, | ||
n | 185 | "landing_page": "https://mmlab.ie.cuhk.edu.hk/~xliyao/deepfashion/", | n | 51 | "landing_page": |
52 | ring-robust-clothes-recognition-and-retrieval-with-rich-annotations/", | ||||
186 | "license_title": null, | 53 | "license_title": null, | ||
187 | "link_orkg": "", | 54 | "link_orkg": "", | ||
188 | "metadata_created": "2024-12-02T18:26:11.257936", | 55 | "metadata_created": "2024-12-02T18:26:11.257936", | ||
n | 189 | "metadata_modified": "2024-12-02T18:26:11.704834", | n | 56 | "metadata_modified": "2024-12-03T10:29:35.740093", |
190 | "name": "deepfashion", | 57 | "name": "deepfashion", | ||
n | 191 | "notes": "Fashion image retrieval task aims to search relevant | n | 58 | "notes": "Deepfashion: Powering robust clothes recognition and |
192 | clothing items of a query image from the gallery. The previous recipes | 59 | retrieval with rich annotations.", | ||
193 | focus on designing different distance-based loss functions, pulling | ||||
194 | relevant pairs to be close and pushing irrelevant images apart. | ||||
195 | However, these methods ignore fine-grained features (e.g. neckband, | ||||
196 | cuff) of clothing images.", | ||||
197 | "num_resources": 1, | 60 | "num_resources": 0, | ||
198 | "num_tags": 35, | 61 | "num_tags": 3, | ||
199 | "organization": { | 62 | "organization": { | ||
200 | "approval_status": "approved", | 63 | "approval_status": "approved", | ||
201 | "created": "2024-11-25T12:11:38.292601", | 64 | "created": "2024-11-25T12:11:38.292601", | ||
202 | "description": "", | 65 | "description": "", | ||
203 | "id": "079d46db-32df-4b48-91f3-0a8bc8f69559", | 66 | "id": "079d46db-32df-4b48-91f3-0a8bc8f69559", | ||
204 | "image_url": "", | 67 | "image_url": "", | ||
205 | "is_organization": true, | 68 | "is_organization": true, | ||
206 | "name": "no-organization", | 69 | "name": "no-organization", | ||
207 | "state": "active", | 70 | "state": "active", | ||
208 | "title": "No Organization", | 71 | "title": "No Organization", | ||
209 | "type": "organization" | 72 | "type": "organization" | ||
210 | }, | 73 | }, | ||
211 | "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559", | 74 | "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559", | ||
212 | "private": false, | 75 | "private": false, | ||
213 | "relationships_as_object": [], | 76 | "relationships_as_object": [], | ||
214 | "relationships_as_subject": [], | 77 | "relationships_as_subject": [], | ||
n | 215 | "resources": [ | n | 78 | "resources": [], |
216 | { | ||||
217 | "cache_last_updated": null, | ||||
218 | "cache_url": null, | ||||
219 | "created": "2024-12-02T18:38:42", | ||||
220 | "data": [ | ||||
221 | "dcterms:title", | ||||
222 | "dcterms:accessRights", | ||||
223 | "dcterms:creator", | ||||
224 | "dcterms:description", | ||||
225 | "dcterms:issued", | ||||
226 | "dcterms:language", | ||||
227 | "dcterms:identifier", | ||||
228 | "dcat:theme", | ||||
229 | "dcterms:type", | ||||
230 | "dcat:keyword", | ||||
231 | "dcat:landingPage", | ||||
232 | "dcterms:hasVersion", | ||||
233 | "dcterms:format", | ||||
234 | "mls:task", | ||||
235 | "datacite:isDescribedBy" | ||||
236 | ], | ||||
237 | "description": "The json representation of the dataset with its | ||||
238 | distributions based on DCAT.", | ||||
239 | "format": "JSON", | ||||
240 | "hash": "", | ||||
241 | "id": "d1b89bc9-24af-41ff-97b6-1044a4b0ca4c", | ||||
242 | "last_modified": "2024-12-02T18:26:11.691997", | ||||
243 | "metadata_modified": "2024-12-02T18:26:11.707530", | ||||
244 | "mimetype": "application/json", | ||||
245 | "mimetype_inner": null, | ||||
246 | "name": "Original Metadata", | ||||
247 | "package_id": "e9e7aee4-374f-4748-b8e2-059fba75a70c", | ||||
248 | "position": 0, | ||||
249 | "resource_type": null, | ||||
250 | "size": 2777, | ||||
251 | "state": "active", | ||||
252 | "url": | ||||
253 | resource/d1b89bc9-24af-41ff-97b6-1044a4b0ca4c/download/metadata.json", | ||||
254 | "url_type": "upload" | ||||
255 | } | ||||
256 | ], | ||||
257 | "services_used_list": "", | 79 | "services_used_list": "", | ||
258 | "state": "active", | 80 | "state": "active", | ||
259 | "tags": [ | 81 | "tags": [ | ||
260 | { | 82 | { | ||
n | 261 | "display_name": "2D Reconstruction", | n | ||
262 | "id": "a3ae23a2-bbd8-44fb-814e-a4935fe38a76", | ||||
263 | "name": "2D Reconstruction", | ||||
264 | "state": "active", | ||||
265 | "vocabulary_id": null | ||||
266 | }, | ||||
267 | { | ||||
268 | "display_name": "Animation-ready", | ||||
269 | "id": "80911826-0fc2-4d27-989a-310930ca5a6a", | ||||
270 | "name": "Animation-ready", | ||||
271 | "state": "active", | ||||
272 | "vocabulary_id": null | ||||
273 | }, | ||||
274 | { | ||||
275 | "display_name": "Attribute Classification", | ||||
276 | "id": "478c5578-5e06-465e-9681-dc2904cbc556", | ||||
277 | "name": "Attribute Classification", | ||||
278 | "state": "active", | ||||
279 | "vocabulary_id": null | ||||
280 | }, | ||||
281 | { | ||||
282 | "display_name": "Clothed Humans", | ||||
283 | "id": "bca31b30-5db8-4363-92e9-fd023961335c", | ||||
284 | "name": "Clothed Humans", | ||||
285 | "state": "active", | ||||
286 | "vocabulary_id": null | ||||
287 | }, | ||||
288 | { | ||||
289 | "display_name": "Clothes Retrieval", | ||||
290 | "id": "7f599fed-85fd-4d16-9374-5a31c2023181", | ||||
291 | "name": "Clothes Retrieval", | ||||
292 | "state": "active", | ||||
293 | "vocabulary_id": null | ||||
294 | }, | ||||
295 | { | ||||
296 | "display_name": "Clothing Details", | ||||
297 | "id": "ad269d36-0f5b-4007-9354-d973a25ca114", | ||||
298 | "name": "Clothing Details", | ||||
299 | "state": "active", | ||||
300 | "vocabulary_id": null | ||||
301 | }, | ||||
302 | { | ||||
303 | "display_name": "Clothing Recognition", | 83 | "display_name": "Clothes Recognition", | ||
304 | "id": "7588c56c-a176-4103-8d2e-266c0998cab2", | 84 | "id": "982d62b1-0139-42e7-b65a-070f3ed16c1c", | ||
305 | "name": "Clothing Recognition", | 85 | "name": "Clothes Recognition", | ||
306 | "state": "active", | 86 | "state": "active", | ||
307 | "vocabulary_id": null | 87 | "vocabulary_id": null | ||
308 | }, | 88 | }, | ||
309 | { | 89 | { | ||
310 | "display_name": "Deep Learning", | 90 | "display_name": "Deep Learning", | ||
311 | "id": "3feb7b21-e049-4dca-9372-0d438c483f6a", | 91 | "id": "3feb7b21-e049-4dca-9372-0d438c483f6a", | ||
312 | "name": "Deep Learning", | 92 | "name": "Deep Learning", | ||
313 | "state": "active", | 93 | "state": "active", | ||
314 | "vocabulary_id": null | 94 | "vocabulary_id": null | ||
315 | }, | 95 | }, | ||
316 | { | 96 | { | ||
n | 317 | "display_name": "DeepFashion", | n | ||
318 | "id": "6092e498-3fab-4e3f-91b6-d9e8a67c07e1", | ||||
319 | "name": "DeepFashion", | ||||
320 | "state": "active", | ||||
321 | "vocabulary_id": null | ||||
322 | }, | ||||
323 | { | ||||
324 | "display_name": "Fashion Attribute", | ||||
325 | "id": "b473337f-4c61-4c24-b9e0-6c0bfb1878f3", | ||||
326 | "name": "Fashion Attribute", | ||||
327 | "state": "active", | ||||
328 | "vocabulary_id": null | ||||
329 | }, | ||||
330 | { | ||||
331 | "display_name": "Image Dataset", | ||||
332 | "id": "51aed645-6dd9-4e08-894a-10944ecefd8b", | ||||
333 | "name": "Image Dataset", | ||||
334 | "state": "active", | ||||
335 | "vocabulary_id": null | ||||
336 | }, | ||||
337 | { | ||||
338 | "display_name": "Image Retrieval", | 97 | "display_name": "Image Retrieval", | ||
339 | "id": "4b81e5bb-1f8f-415b-a688-0e2d94b6d499", | 98 | "id": "4b81e5bb-1f8f-415b-a688-0e2d94b6d499", | ||
340 | "name": "Image Retrieval", | 99 | "name": "Image Retrieval", | ||
341 | "state": "active", | 100 | "state": "active", | ||
342 | "vocabulary_id": null | 101 | "vocabulary_id": null | ||
n | 343 | }, | n | ||
344 | { | ||||
345 | "display_name": "Monocular Image", | ||||
346 | "id": "54b7dbe1-0ad7-4aaf-8c83-de4ebb1c14c6", | ||||
347 | "name": "Monocular Image", | ||||
348 | "state": "active", | ||||
349 | "vocabulary_id": null | ||||
350 | }, | ||||
351 | { | ||||
352 | "display_name": "arbitrary poses", | ||||
353 | "id": "aef03447-8203-401e-907f-0bf8070ee575", | ||||
354 | "name": "arbitrary poses", | ||||
355 | "state": "active", | ||||
356 | "vocabulary_id": null | ||||
357 | }, | ||||
358 | { | ||||
359 | "display_name": "attribute-decomposed GAN", | ||||
360 | "id": "83f09518-99de-4679-883f-c768c856df06", | ||||
361 | "name": "attribute-decomposed GAN", | ||||
362 | "state": "active", | ||||
363 | "vocabulary_id": null | ||||
364 | }, | ||||
365 | { | ||||
366 | "display_name": "clothing attributes", | ||||
367 | "id": "38d378f9-5376-4879-8363-72b1b5a1c318", | ||||
368 | "name": "clothing attributes", | ||||
369 | "state": "active", | ||||
370 | "vocabulary_id": null | ||||
371 | }, | ||||
372 | { | ||||
373 | "display_name": "clothing image", | ||||
374 | "id": "84574672-5254-4b73-9448-8dcccd529b35", | ||||
375 | "name": "clothing image", | ||||
376 | "state": "active", | ||||
377 | "vocabulary_id": null | ||||
378 | }, | ||||
379 | { | ||||
380 | "display_name": "clothing image analysis", | ||||
381 | "id": "b50f06cc-b331-448a-9bb2-6229ace24d51", | ||||
382 | "name": "clothing image analysis", | ||||
383 | "state": "active", | ||||
384 | "vocabulary_id": null | ||||
385 | }, | ||||
386 | { | ||||
387 | "display_name": "clothing images", | ||||
388 | "id": "11b1d51c-ab80-416c-bb4d-2d0441c3fc99", | ||||
389 | "name": "clothing images", | ||||
390 | "state": "active", | ||||
391 | "vocabulary_id": null | ||||
392 | }, | ||||
393 | { | ||||
394 | "display_name": "controllable image synthesis", | ||||
395 | "id": "600f4aca-7f47-4c66-8b0b-be1ce7e6b54f", | ||||
396 | "name": "controllable image synthesis", | ||||
397 | "state": "active", | ||||
398 | "vocabulary_id": null | ||||
399 | }, | ||||
400 | { | ||||
401 | "display_name": "deep learning", | ||||
402 | "id": "19e41883-3799-4184-9e0e-26c95795b119", | ||||
403 | "name": "deep learning", | ||||
404 | "state": "active", | ||||
405 | "vocabulary_id": null | ||||
406 | }, | ||||
407 | { | ||||
408 | "display_name": "fashion", | ||||
409 | "id": "d0cb2d67-aeb2-4629-bc32-db590d2dbef1", | ||||
410 | "name": "fashion", | ||||
411 | "state": "active", | ||||
412 | "vocabulary_id": null | ||||
413 | }, | ||||
414 | { | ||||
415 | "display_name": "fashion benchmark", | ||||
416 | "id": "0cf9e67c-85f4-4eb6-8b16-6efb29be563f", | ||||
417 | "name": "fashion benchmark", | ||||
418 | "state": "active", | ||||
419 | "vocabulary_id": null | ||||
420 | }, | ||||
421 | { | ||||
422 | "display_name": "fashion image retrieval", | ||||
423 | "id": "692758c8-7468-409e-be5f-5ec1cbabb472", | ||||
424 | "name": "fashion image retrieval", | ||||
425 | "state": "active", | ||||
426 | "vocabulary_id": null | ||||
427 | }, | ||||
428 | { | ||||
429 | "display_name": "fashion item generation", | ||||
430 | "id": "391837ad-545e-48dc-a2da-e6f0b61467b4", | ||||
431 | "name": "fashion item generation", | ||||
432 | "state": "active", | ||||
433 | "vocabulary_id": null | ||||
434 | }, | ||||
435 | { | ||||
436 | "display_name": "fine-grained attributes", | ||||
437 | "id": "dcd5984d-fd6d-4415-9f5e-52ca4e01b5c2", | ||||
438 | "name": "fine-grained attributes", | ||||
439 | "state": "active", | ||||
440 | "vocabulary_id": null | ||||
441 | }, | ||||
442 | { | ||||
443 | "display_name": "fine-grained features", | ||||
444 | "id": "d9a2050f-6adb-4da5-9af3-ed0664c57619", | ||||
445 | "name": "fine-grained features", | ||||
446 | "state": "active", | ||||
447 | "vocabulary_id": null | ||||
448 | }, | ||||
449 | { | ||||
450 | "display_name": "image synthesis", | ||||
451 | "id": "c2b0a4a1-48f7-4f89-82b9-cd118596dfc5", | ||||
452 | "name": "image synthesis", | ||||
453 | "state": "active", | ||||
454 | "vocabulary_id": null | ||||
455 | }, | ||||
456 | { | ||||
457 | "display_name": "landmarks", | ||||
458 | "id": "7944b1ba-1117-4a26-b377-530acabce8a4", | ||||
459 | "name": "landmarks", | ||||
460 | "state": "active", | ||||
461 | "vocabulary_id": null | ||||
462 | }, | ||||
463 | { | ||||
464 | "display_name": "localization dataset", | ||||
465 | "id": "67d37285-d087-40da-9e7c-0697f161e02b", | ||||
466 | "name": "localization dataset", | ||||
467 | "state": "active", | ||||
468 | "vocabulary_id": null | ||||
469 | }, | ||||
470 | { | ||||
471 | "display_name": "person image synthesis", | ||||
472 | "id": "b12cbaca-e928-4ab7-a6f2-f45f2a356f3f", | ||||
473 | "name": "person image synthesis", | ||||
474 | "state": "active", | ||||
475 | "vocabulary_id": null | ||||
476 | }, | ||||
477 | { | ||||
478 | "display_name": "person images", | ||||
479 | "id": "94bdd951-40b2-40c4-a9e0-f10cd424c1f8", | ||||
480 | "name": "person images", | ||||
481 | "state": "active", | ||||
482 | "vocabulary_id": null | ||||
483 | }, | ||||
484 | { | ||||
485 | "display_name": "pose guided image synthesis", | ||||
486 | "id": "59ddc76d-95b6-47ab-afb4-68f90839aa6e", | ||||
487 | "name": "pose guided image synthesis", | ||||
488 | "state": "active", | ||||
489 | "vocabulary_id": null | ||||
490 | }, | ||||
491 | { | ||||
492 | "display_name": "rich annotations", | ||||
493 | "id": "88689540-a0df-41b4-a1d8-252a8ef3d71e", | ||||
494 | "name": "rich annotations", | ||||
495 | "state": "active", | ||||
496 | "vocabulary_id": null | ||||
497 | }, | ||||
498 | { | ||||
499 | "display_name": "segmentation masks", | ||||
500 | "id": "b833a1ff-2848-4f36-9446-2145686904bf", | ||||
501 | "name": "segmentation masks", | ||||
502 | "state": "active", | ||||
503 | "vocabulary_id": null | ||||
504 | } | 102 | } | ||
505 | ], | 103 | ], | ||
t | 506 | "title": "DeepFashion", | t | 104 | "title": "Deepfashion", |
507 | "type": "dataset", | 105 | "type": "dataset", | ||
508 | "version": "" | 106 | "version": "" | ||
509 | } | 107 | } |