Changes
On January 2, 2025 at 9:31:18 PM UTC, admin:

- Changed value of field doi_status to True in StrategyQA
- Changed value of field doi_date_published to 2025-01-02 in StrategyQA
- Added resource Original Metadata to StrategyQA
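For reference, the two field updates amount to a single package patch. Below is a minimal sketch of how the same edit could be made programmatically, assuming the portal exposes the stock CKAN Action API under https://service.tib.eu/ldmservice (the "domain" field of the record below) and that the caller holds an API token with edit rights; neither is shown in this log, and the function name and token placeholder are hypothetical:

import requests

CKAN_URL = "https://service.tib.eu/ldmservice"
API_TOKEN = "..."  # hypothetical placeholder; a real token with edit rights is required

def patch_doi_fields(dataset_id: str) -> dict:
    """Replay the two field updates recorded in this change entry."""
    response = requests.post(
        f"{CKAN_URL}/api/3/action/package_patch",
        headers={"Authorization": API_TOKEN},
        json={
            "id": dataset_id,                    # dataset name or UUID
            "doi_status": True,                  # was false
            "doi_date_published": "2025-01-02",  # was null
        },
        timeout=30,
    )
    response.raise_for_status()
    return response.json()["result"]

patch_doi_fields("strategyqa")

Unlike package_update, package_patch only touches the supplied keys, which matches the behavior recorded here: every other field of the package is left unchanged.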
Package metadata before (-) and after (+) this change:

  {
    "access_rights": "",
    "author": "Mor Geva",
    "author_email": "",
    "citation": [
      "https://doi.org/10.48550/arXiv.2406.13929",
      "https://doi.org/10.48550/arXiv.2212.09656",
      "https://doi.org/10.48550/arXiv.2403.01390",
      "https://doi.org/10.48550/arXiv.2402.10612",
      "https://doi.org/10.48550/arXiv.2402.18678",
      "https://doi.org/10.48550/arXiv.2309.13075"
    ],
    "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700",
    "defined_in": "https://doi.org/10.48550/arXiv.2302.12246",
    "doi": "10.57702/nkmvf3lj",
-   "doi_date_published": null,
+   "doi_date_published": "2025-01-02",
    "doi_publisher": "TIB",
-   "doi_status": false,
+   "doi_status": true,
    "domain": "https://service.tib.eu/ldmservice",
    "extra_authors": [
      {
        "extra_author": "Daniel Khashabi",
        "orcid": ""
      },
      {
        "extra_author": "Elad Segal",
        "orcid": ""
      },
      {
        "extra_author": "Tushar Khot",
        "orcid": ""
      },
      {
        "extra_author": "Dan Roth",
        "orcid": ""
      },
      {
        "extra_author": "Jonathan Berant",
        "orcid": ""
      }
    ],
    "groups": [
      {
        "description": "",
        "display_name": "Commonsense Reasoning",
        "id": "3be75146-9a95-41b8-9005-5774447957de",
        "image_display_url": "",
        "name": "commonsense-reasoning",
        "title": "Commonsense Reasoning"
      },
      {
        "description": "",
        "display_name": "Multi-Hop",
        "id": "e0a73f25-d456-4171-907b-149dcfd9c52b",
        "image_display_url": "",
        "name": "multi-hop",
        "title": "Multi-Hop"
      },
      {
        "description": "",
        "display_name": "Question Answering",
        "id": "01e9bd6b-910d-421d-bb31-799604023211",
        "image_display_url": "",
        "name": "question-answering",
        "title": "Question Answering"
      }
    ],
    "id": "177dcac4-5208-4a79-8b3a-a555dc870013",
    "isopen": false,
    "landing_page": "https://huggingface.co/datasets/metaeval/strategy-qa",
    "license_title": null,
    "link_orkg": "",
    "metadata_created": "2025-01-02T21:31:17.177051",
-   "metadata_modified": "2025-01-02T21:31:17.177059",
+   "metadata_modified": "2025-01-02T21:31:17.745890",
    "name": "strategyqa",
    "notes": "The StrategyQA dataset is used to evaluate the ability of LLMs in generating accurate answers to multi-step reasoning questions.",
-   "num_resources": 0,
+   "num_resources": 1,
    "num_tags": 16,
    "organization": {
      "approval_status": "approved",
      "created": "2024-11-25T12:11:38.292601",
      "description": "",
      "id": "079d46db-32df-4b48-91f3-0a8bc8f69559",
      "image_url": "",
      "is_organization": true,
      "name": "no-organization",
      "state": "active",
      "title": "No Organization",
      "type": "organization"
    },
    "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559",
    "private": false,
    "relationships_as_object": [],
    "relationships_as_subject": [],
-   "resources": [],
+   "resources": [
+     {
+       "cache_last_updated": null,
+       "cache_url": null,
+       "created": "2025-01-02T21:52:27",
+       "data": [
+         "dcterms:title",
+         "dcterms:accessRights",
+         "dcterms:creator",
+         "dcterms:description",
+         "dcterms:issued",
+         "dcterms:language",
+         "dcterms:identifier",
+         "dcat:theme",
+         "dcterms:type",
+         "dcat:keyword",
+         "dcat:landingPage",
+         "dcterms:hasVersion",
+         "dcterms:format",
+         "mls:task",
+         "datacite:isDescribedBy"
+       ],
+       "description": "The json representation of the dataset with its distributions based on DCAT.",
+       "format": "JSON",
+       "hash": "",
+       "id": "68d73a19-2eff-4ec9-85a0-6b1bf3eb60da",
+       "last_modified": "2025-01-02T21:31:17.736655",
+       "metadata_modified": "2025-01-02T21:31:17.748725",
+       "mimetype": "application/json",
+       "mimetype_inner": null,
+       "name": "Original Metadata",
+       "package_id": "177dcac4-5208-4a79-8b3a-a555dc870013",
+       "position": 0,
+       "resource_type": null,
+       "size": 1363,
+       "state": "active",
+       "url": "resource/68d73a19-2eff-4ec9-85a0-6b1bf3eb60da/download/metadata.json",
+       "url_type": "upload"
+     }
+   ],
    "services_used_list": "",
    "state": "active",
    "tags": [
      {
        "display_name": "QA dataset",
        "id": "d3e45412-cfcd-4a0e-b91e-4eb408d1075b",
        "name": "QA dataset",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "StrategyQA",
        "id": "cb0be26f-de55-4ed3-8166-3d1e58b3f424",
        "name": "StrategyQA",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "Wikipedia articles",
        "id": "0b7378d3-9081-431f-9985-df07ef768343",
        "name": "Wikipedia articles",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "cognitive tasks",
        "id": "3e682194-c52f-465a-91c8-029cfc4825c5",
        "name": "cognitive tasks",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "commonsense reasoning",
        "id": "52066668-35e4-47a9-9cf9-3c8262f60dbb",
        "name": "commonsense reasoning",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "context-based question answering",
        "id": "3913defa-f016-4c7d-8fa9-c6f87bc01ac6",
        "name": "context-based question answering",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "human-annotated",
        "id": "5503021d-9375-4a4d-b16c-6d670575ee98",
        "name": "human-annotated",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "model-generated",
        "id": "c00b58ec-556d-4df9-9c1f-e35deaf32abd",
        "name": "model-generated",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "multi-hop",
        "id": "3efd04a4-c30b-41c8-9799-45bb552de0f4",
        "name": "multi-hop",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "multi-step reasoning",
        "id": "da607e31-92fd-474b-b0f2-326f634ca6d0",
        "name": "multi-step reasoning",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "open-domain questions",
        "id": "f78262a8-cbf5-4c1b-8d40-b3b0bed6ef29",
        "name": "open-domain questions",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "question answering",
        "id": "d4e35d0b-ca07-4d05-bb27-50b2f30f2f00",
        "name": "question answering",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "reasoning steps",
        "id": "c0b8f360-d155-4ba9-b994-aead31d27930",
        "name": "reasoning steps",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "strategyqa",
        "id": "a2530fe0-af46-4bf2-8387-309181bb08bc",
        "name": "strategyqa",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "synthetic",
        "id": "abe7f9cd-9096-4c14-9cdf-b4b1e273d8d9",
        "name": "synthetic",
        "state": "active",
        "vocabulary_id": null
      },
      {
        "display_name": "yes-no questions",
        "id": "5b43d825-ab0b-4700-bbdb-e93c16ca13cd",
        "name": "yes-no questions",
        "state": "active",
        "vocabulary_id": null
      }
    ],
    "title": "StrategyQA",
    "type": "dataset",
    "version": ""
  }
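The updated record can be checked with a read-only call against the same assumed CKAN Action API; no token is needed for a public dataset. A minimal sketch:

import requests

CKAN_URL = "https://service.tib.eu/ldmservice"

# Fetch the current package and inspect the fields touched by this change.
pkg = requests.get(
    f"{CKAN_URL}/api/3/action/package_show",
    params={"id": "strategyqa"},
    timeout=30,
).json()["result"]

print(pkg["doi_status"], pkg["doi_date_published"], pkg["num_resources"])

# The single resource is the DCAT-based JSON export added in this change.
resource = pkg["resources"][0]
print(resource["name"], resource["format"], resource["size"])

Note that the resource "url" in the record above is truncated in this log (its host prefix is missing), so the download link should be taken from the package_show response rather than copied from the diff.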