Changes
On December 16, 2024 at 7:24:30 PM UTC, admin:
-
Changed value of field
doi_status
to True
in FLIP: A Method for Reducing Computation in Contrastive Language-Image Pre-training -
Changed value of field
doi_date_published
to 2024-12-16
in FLIP: A Method for Reducing Computation in Contrastive Language-Image Pre-training -
Added resource Original Metadata to FLIP: A Method for Reducing Computation in Contrastive Language-Image Pre-training
f | 1 | { | f | 1 | { |
2 | "access_rights": "", | 2 | "access_rights": "", | ||
3 | "author": "Xi Chen", | 3 | "author": "Xi Chen", | ||
4 | "author_email": "", | 4 | "author_email": "", | ||
5 | "citation": [], | 5 | "citation": [], | ||
6 | "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700", | 6 | "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700", | ||
7 | "defined_in": "", | 7 | "defined_in": "", | ||
8 | "doi": "10.57702/n67htx19", | 8 | "doi": "10.57702/n67htx19", | ||
n | 9 | "doi_date_published": null, | n | 9 | "doi_date_published": "2024-12-16", |
10 | "doi_publisher": "TIB", | 10 | "doi_publisher": "TIB", | ||
n | 11 | "doi_status": false, | n | 11 | "doi_status": true, |
12 | "domain": "https://service.tib.eu/ldmservice", | 12 | "domain": "https://service.tib.eu/ldmservice", | ||
13 | "extra_authors": [ | 13 | "extra_authors": [ | ||
14 | { | 14 | { | ||
15 | "extra_author": "Xiao Wang", | 15 | "extra_author": "Xiao Wang", | ||
16 | "orcid": "" | 16 | "orcid": "" | ||
17 | }, | 17 | }, | ||
18 | { | 18 | { | ||
19 | "extra_author": "Soravit Changpinyo", | 19 | "extra_author": "Soravit Changpinyo", | ||
20 | "orcid": "" | 20 | "orcid": "" | ||
21 | } | 21 | } | ||
22 | ], | 22 | ], | ||
23 | "groups": [], | 23 | "groups": [], | ||
24 | "id": "71d6d975-b013-434e-b9d1-9b729247f6ea", | 24 | "id": "71d6d975-b013-434e-b9d1-9b729247f6ea", | ||
25 | "isopen": false, | 25 | "isopen": false, | ||
26 | "landing_page": "", | 26 | "landing_page": "", | ||
27 | "license_title": null, | 27 | "license_title": null, | ||
28 | "link_orkg": "", | 28 | "link_orkg": "", | ||
29 | "metadata_created": "2024-12-16T19:24:29.436900", | 29 | "metadata_created": "2024-12-16T19:24:29.436900", | ||
n | 30 | "metadata_modified": "2024-12-16T19:24:29.436905", | n | 30 | "metadata_modified": "2024-12-16T19:24:29.851758", |
31 | "name": | 31 | "name": | ||
32 | -for-reducing-computation-in-contrastive-language-image-pre-training", | 32 | -for-reducing-computation-in-contrastive-language-image-pre-training", | ||
33 | "notes": "This paper proposes a method called FLIP, which masks half | 33 | "notes": "This paper proposes a method called FLIP, which masks half | ||
34 | or more patches of the training images to reduce computation by 2x and | 34 | or more patches of the training images to reduce computation by 2x and | ||
35 | allow for the use of larger batch sizes.", | 35 | allow for the use of larger batch sizes.", | ||
n | 36 | "num_resources": 0, | n | 36 | "num_resources": 1, |
37 | "num_tags": 3, | 37 | "num_tags": 3, | ||
38 | "organization": { | 38 | "organization": { | ||
39 | "approval_status": "approved", | 39 | "approval_status": "approved", | ||
40 | "created": "2024-11-25T12:11:38.292601", | 40 | "created": "2024-11-25T12:11:38.292601", | ||
41 | "description": "", | 41 | "description": "", | ||
42 | "id": "079d46db-32df-4b48-91f3-0a8bc8f69559", | 42 | "id": "079d46db-32df-4b48-91f3-0a8bc8f69559", | ||
43 | "image_url": "", | 43 | "image_url": "", | ||
44 | "is_organization": true, | 44 | "is_organization": true, | ||
45 | "name": "no-organization", | 45 | "name": "no-organization", | ||
46 | "state": "active", | 46 | "state": "active", | ||
47 | "title": "No Organization", | 47 | "title": "No Organization", | ||
48 | "type": "organization" | 48 | "type": "organization" | ||
49 | }, | 49 | }, | ||
50 | "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559", | 50 | "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559", | ||
51 | "private": false, | 51 | "private": false, | ||
52 | "relationships_as_object": [], | 52 | "relationships_as_object": [], | ||
53 | "relationships_as_subject": [], | 53 | "relationships_as_subject": [], | ||
t | 54 | "resources": [], | t | 54 | "resources": [ |
55 | { | ||||
56 | "cache_last_updated": null, | ||||
57 | "cache_url": null, | ||||
58 | "created": "2024-12-16T18:25:40", | ||||
59 | "data": [ | ||||
60 | "dcterms:title", | ||||
61 | "dcterms:accessRights", | ||||
62 | "dcterms:creator", | ||||
63 | "dcterms:description", | ||||
64 | "dcterms:issued", | ||||
65 | "dcterms:language", | ||||
66 | "dcterms:identifier", | ||||
67 | "dcat:theme", | ||||
68 | "dcterms:type", | ||||
69 | "dcat:keyword", | ||||
70 | "dcat:landingPage", | ||||
71 | "dcterms:hasVersion", | ||||
72 | "dcterms:format", | ||||
73 | "mls:task" | ||||
74 | ], | ||||
75 | "description": "The json representation of the dataset with its | ||||
76 | distributions based on DCAT.", | ||||
77 | "format": "JSON", | ||||
78 | "hash": "", | ||||
79 | "id": "760d6408-006a-4ed6-98f0-f28297d68b88", | ||||
80 | "last_modified": "2024-12-16T19:24:29.844546", | ||||
81 | "metadata_modified": "2024-12-16T19:24:29.854619", | ||||
82 | "mimetype": "application/json", | ||||
83 | "mimetype_inner": null, | ||||
84 | "name": "Original Metadata", | ||||
85 | "package_id": "71d6d975-b013-434e-b9d1-9b729247f6ea", | ||||
86 | "position": 0, | ||||
87 | "resource_type": null, | ||||
88 | "size": 814, | ||||
89 | "state": "active", | ||||
90 | "url": | ||||
91 | resource/760d6408-006a-4ed6-98f0-f28297d68b88/download/metadata.json", | ||||
92 | "url_type": "upload" | ||||
93 | } | ||||
94 | ], | ||||
55 | "services_used_list": "", | 95 | "services_used_list": "", | ||
56 | "state": "active", | 96 | "state": "active", | ||
57 | "tags": [ | 97 | "tags": [ | ||
58 | { | 98 | { | ||
59 | "display_name": "Contrastive Learning", | 99 | "display_name": "Contrastive Learning", | ||
60 | "id": "c2123673-bf75-4394-8b9c-da99c0d38053", | 100 | "id": "c2123673-bf75-4394-8b9c-da99c0d38053", | ||
61 | "name": "Contrastive Learning", | 101 | "name": "Contrastive Learning", | ||
62 | "state": "active", | 102 | "state": "active", | ||
63 | "vocabulary_id": null | 103 | "vocabulary_id": null | ||
64 | }, | 104 | }, | ||
65 | { | 105 | { | ||
66 | "display_name": "FLIP", | 106 | "display_name": "FLIP", | ||
67 | "id": "09500848-1b8c-47fe-81df-260c4bd87d2b", | 107 | "id": "09500848-1b8c-47fe-81df-260c4bd87d2b", | ||
68 | "name": "FLIP", | 108 | "name": "FLIP", | ||
69 | "state": "active", | 109 | "state": "active", | ||
70 | "vocabulary_id": null | 110 | "vocabulary_id": null | ||
71 | }, | 111 | }, | ||
72 | { | 112 | { | ||
73 | "display_name": "Image and Language Understanding", | 113 | "display_name": "Image and Language Understanding", | ||
74 | "id": "7ff95bc0-05a6-488a-940d-622ebd68f0e4", | 114 | "id": "7ff95bc0-05a6-488a-940d-622ebd68f0e4", | ||
75 | "name": "Image and Language Understanding", | 115 | "name": "Image and Language Understanding", | ||
76 | "state": "active", | 116 | "state": "active", | ||
77 | "vocabulary_id": null | 117 | "vocabulary_id": null | ||
78 | } | 118 | } | ||
79 | ], | 119 | ], | ||
80 | "title": "FLIP: A Method for Reducing Computation in Contrastive | 120 | "title": "FLIP: A Method for Reducing Computation in Contrastive | ||
81 | Language-Image Pre-training", | 121 | Language-Image Pre-training", | ||
82 | "type": "dataset", | 122 | "type": "dataset", | ||
83 | "version": "" | 123 | "version": "" | ||
84 | } | 124 | } |