Changes

On December 16, 2024 at 6:01:09 PM UTC:

- Changed value of field doi_date_published to 2024-12-16 in Stanford Online Products
- Changed value of field doi_status to True in Stanford Online Products
- Added resource Original Metadata to Stanford Online Products
The side-by-side revision view is reproduced below as a field-level diff of the package metadata (- old value, + new value):

 {
     "access_rights": "",
     "author": "Hyun Oh Song",
     "author_email": "",
     "citation": [
         "https://doi.org/10.48550/arXiv.1912.03500",
         "https://doi.org/10.48550/arXiv.2004.01113",
         "https://doi.org/10.48550/arXiv.2011.08877",
         "https://doi.org/10.48550/arXiv.1904.10596",
         "https://doi.org/10.48550/arXiv.2103.15795"
     ],
     "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700",
     "defined_in": "https://doi.org/10.48550/arXiv.2407.03106",
     "doi": "10.57702/4pdx100r",
-    "doi_date_published": null,
+    "doi_date_published": "2024-12-16",
     "doi_publisher": "TIB",
-    "doi_status": false,
+    "doi_status": true,
     "domain": "https://service.tib.eu/ldmservice",
     "extra_authors": [
         {
             "extra_author": "Yu Xiang",
             "orcid": ""
         },
         {
             "extra_author": "Stefanie Jegelka",
             "orcid": ""
         },
         {
             "extra_author": "Silvio Savarese",
             "orcid": ""
         }
     ],
     "groups": [
         {
             "description": "",
             "display_name": "Deep Metric Learning",
             "id": "4d60264e-2c9e-4054-9107-8d3199adef93",
             "image_display_url": "",
             "name": "deep-metric-learning",
             "title": "Deep Metric Learning"
         },
         {
             "description": "",
             "display_name": "Image Classification",
             "id": "18b77292-26aa-4caf-89ed-cbd35fa60474",
             "image_display_url": "",
             "name": "image-classification",
             "title": "Image Classification"
         },
         {
             "description": "",
             "display_name": "Image Retrieval",
             "id": "e9e05338-d06d-4467-abf9-ba0f67f4e096",
             "image_display_url": "",
             "name": "image-retrieval",
             "title": "Image Retrieval"
         },
         {
             "description": "",
             "display_name": "Object Recognition",
             "id": "1b471529-d821-46c3-8ac0-1ec99f0c80bc",
             "image_display_url": "",
             "name": "object-recognition",
             "title": "Object Recognition"
         },
         {
             "description": "",
             "display_name": "Product classification",
             "id": "acd4dec2-cfc2-46d6-8d44-7a857e13b351",
             "image_display_url": "",
             "name": "product-classification",
             "title": "Product classification"
         }
     ],
     "id": "7fa2d5ad-e4a9-49b1-a440-9e348eddd929",
     "isopen": false,
     "landing_page": "https://ai.stanford.edu/~sijia/Stanford-Online-Products/",
     "license_title": null,
     "link_orkg": "",
     "metadata_created": "2024-12-16T18:01:07.801580",
-    "metadata_modified": "2024-12-16T18:01:07.801587",
+    "metadata_modified": "2024-12-16T18:01:08.204836",
     "name": "stanford-online-products",
     "notes": "The Stanford Online Products (SOP) dataset contains 120,053 product images covering 22,634 categories. The training set is composed of 59,551 images of the first 11,318 categories and the testing set is composed of the remaining 60,502 images of the other 11,316 categories.",
-    "num_resources": 0,
+    "num_resources": 1,
     "num_tags": 15,
     "organization": {
         "approval_status": "approved",
         "created": "2024-11-25T12:11:38.292601",
         "description": "",
         "id": "079d46db-32df-4b48-91f3-0a8bc8f69559",
         "image_url": "",
         "is_organization": true,
         "name": "no-organization",
         "state": "active",
         "title": "No Organization",
         "type": "organization"
     },
     "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559",
     "private": false,
     "relationships_as_object": [],
     "relationships_as_subject": [],
-    "resources": [],
+    "resources": [
+        {
+            "cache_last_updated": null,
+            "cache_url": null,
+            "created": "2024-12-16T18:25:32",
+            "data": [
+                "dcterms:title",
+                "dcterms:accessRights",
+                "dcterms:creator",
+                "dcterms:description",
+                "dcterms:issued",
+                "dcterms:language",
+                "dcterms:identifier",
+                "dcat:theme",
+                "dcterms:type",
+                "dcat:keyword",
+                "dcat:landingPage",
+                "dcterms:hasVersion",
+                "dcterms:format",
+                "mls:task",
+                "datacite:isDescribedBy"
+            ],
+            "description": "The json representation of the dataset with its distributions based on DCAT.",
+            "format": "JSON",
+            "hash": "",
+            "id": "cb33dd4f-789e-4b2d-bacd-0c422b3552ff",
+            "last_modified": "2024-12-16T18:01:08.195491",
+            "metadata_modified": "2024-12-16T18:01:08.207842",
+            "mimetype": "application/json",
+            "mimetype_inner": null,
+            "name": "Original Metadata",
+            "package_id": "7fa2d5ad-e4a9-49b1-a440-9e348eddd929",
+            "position": 0,
+            "resource_type": null,
+            "size": 1561,
+            "state": "active",
+            "url": "resource/cb33dd4f-789e-4b2d-bacd-0c422b3552ff/download/metadata.json",
+            "url_type": "upload"
+        }
+    ],
     "services_used_list": "",
     "state": "active",
     "tags": [
         {
             "display_name": "Classification",
             "id": "cc82e2f5-be18-4e27-9bd8-0cb307b8a455",
             "name": "Classification",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Deep Metric Learning",
             "id": "bdc19e3f-3f72-47ff-bccf-56565ae2cf6d",
             "name": "Deep Metric Learning",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Image Clustering",
             "id": "297c9070-e77d-409a-a54d-b2bb9de447a4",
             "name": "Image Clustering",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Image Retrieval",
             "id": "4b81e5bb-1f8f-415b-a688-0e2d94b6d499",
             "name": "Image Retrieval",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Object Recognition",
             "id": "6a4e0b0a-637f-41ee-a647-8af2e035b203",
             "name": "Object Recognition",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Product Images",
             "id": "8587434f-79d6-4590-bd72-7f4bb9da11cd",
             "name": "Product Images",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Products",
             "id": "f92bf4d4-593c-445e-9afb-fd0763c6697a",
             "name": "Products",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "Stanford Online Products",
             "id": "0e5c47ed-1554-4267-b811-b84ddffbf2f6",
             "name": "Stanford Online Products",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "deep metric learning",
             "id": "de770a90-90bd-4003-900f-3881066a04c6",
             "name": "deep metric learning",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "image classification",
             "id": "34936550-ce1a-41b5-8c58-23081a6c673d",
             "name": "image classification",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "image retrieval",
             "id": "93827714-41d5-4aac-a64a-f4390a0e80e1",
             "name": "image retrieval",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "online products",
             "id": "c178d423-21b2-4dd9-890b-f3d75f2c336f",
             "name": "online products",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "product classification",
             "id": "23717da3-4295-4033-b5e2-a57936f34bdd",
             "name": "product classification",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "product images",
             "id": "01b66af0-3e66-4552-aab0-d3fc0aa81a16",
             "name": "product images",
             "state": "active",
             "vocabulary_id": null
         },
         {
             "display_name": "products",
             "id": "5347944f-2fd4-463d-9fe9-9a0e5d1d3374",
             "name": "products",
             "state": "active",
             "vocabulary_id": null
         }
     ],
     "title": "Stanford Online Products",
     "type": "dataset",
     "version": ""
 }
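For reference, the updated record shown above can also be fetched programmatically. The sketch below assumes that the portal listed in the "domain" field (https://service.tib.eu/ldmservice) exposes the standard CKAN Action API; the dataset name stanford-online-products and the field names come from the metadata above, while the script itself is only an illustrative example, not part of this change.

    import json
    import urllib.request

    # Assumption: the portal is a CKAN instance exposing the standard Action API,
    # so package_show returns the package dictionary reproduced above.
    BASE = "https://service.tib.eu/ldmservice"
    url = f"{BASE}/api/3/action/package_show?id=stanford-online-products"

    with urllib.request.urlopen(url) as resp:
        package = json.load(resp)["result"]

    # Fields touched by this change, per the changelog above.
    print(package["doi_status"])           # expected: True
    print(package["doi_date_published"])   # expected: 2024-12-16
    print(package["num_resources"])        # expected: 1

    # The newly added "Original Metadata" resource (DCAT-based JSON).
    for res in package["resources"]:
        print(res["name"], res["format"], res["url"])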