Changes
On December 2, 2024 at 6:21:51 PM UTC, admin:
- Changed value of field doi_status to True in MotionCLIP: Exposing Human Motion Generation to CLIP Space
- Changed value of field doi_date_published to 2024-12-02 in MotionCLIP: Exposing Human Motion Generation to CLIP Space
- Added resource Original Metadata to MotionCLIP: Exposing Human Motion Generation to CLIP Space
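The full before/after metadata diff follows below. As a quick cross-check, the two changed fields can be read back from the live record. This is a minimal sketch, assuming the portal at https://service.tib.eu/ldmservice exposes the standard CKAN Action API under /api/3/action/ (an assumption; the API path is not stated in this change log).

# Minimal sketch: cross-check the two field changes recorded above.
# Assumes the standard CKAN Action API is reachable at BASE/api/3/action/.
import json
from urllib.request import urlopen

BASE = "https://service.tib.eu/ldmservice"
DATASET = "motionclip--exposing-human-motion-generation-to-clip-space"

with urlopen(f"{BASE}/api/3/action/package_show?id={DATASET}") as resp:
    package = json.load(resp)["result"]

# Both fields appear as top-level keys in the metadata shown below.
print(package.get("doi_status"))          # expected: True (was False)
print(package.get("doi_date_published"))  # expected: "2024-12-02" (was null)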
The package metadata before and after this update (unchanged lines shown for context; "-" marks the old value, "+" the new one):

 {
   "access_rights": "",
   "author": "Guy Tevet",
   "author_email": "",
   "citation": [],
   "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700",
   "defined_in": "https://doi.org/10.48550/arXiv.2203.08063",
   "doi": "10.57702/omvyrrqz",
-  "doi_date_published": null,
+  "doi_date_published": "2024-12-02",
   "doi_publisher": "TIB",
-  "doi_status": false,
+  "doi_status": true,
   "domain": "https://service.tib.eu/ldmservice",
   "extra_authors": [
     {
       "extra_author": "Brian Gordon",
       "orcid": ""
     },
     {
       "extra_author": "Amir Hertz",
       "orcid": ""
     },
     {
       "extra_author": "Amit H. Bermano",
       "orcid": ""
     },
     {
       "extra_author": "Daniel Cohen-Or",
       "orcid": ""
     }
   ],
   "groups": [
     {
       "description": "",
       "display_name": "3D Sequences",
       "id": "504f62da-6112-4c88-a73c-0ca2834e3adb",
       "image_display_url": "",
       "name": "3d-sequences",
       "title": "3D Sequences"
     },
     {
       "description": "",
       "display_name": "Human Motion Generation",
       "id": "08c7e794-2f59-40d0-bc18-9b3dbfd8b263",
       "image_display_url": "",
       "name": "human-motion-generation",
       "title": "Human Motion Generation"
     },
     {
       "description": "",
       "display_name": "Virtual Characters",
       "id": "315bb6dc-2d48-469f-88bc-a2515811c1a3",
       "image_display_url": "",
       "name": "virtual-characters",
       "title": "Virtual Characters"
     }
   ],
   "id": "255f44a3-b33c-4f5b-b6ce-dfecd594e8c0",
   "isopen": false,
   "landing_page": "https://guytevet.github.io/motionclip-page/",
   "license_title": null,
   "link_orkg": "",
   "metadata_created": "2024-12-02T18:21:50.346433",
-  "metadata_modified": "2024-12-02T18:21:50.346439",
+  "metadata_modified": "2024-12-02T18:21:50.678405",
   "name": "motionclip--exposing-human-motion-generation-to-clip-space",
   "notes": "Human motion generation includes the intuitive description, editing, and generation of 3D sequences of human poses. It is relevant to many applications that require virtual or robotic characters. Motion generation is, however, a challenging task. Perhaps the most challenging aspect is the limited availability of data, which is expensive to acquire and to label.",
-  "num_resources": 0,
+  "num_resources": 1,
   "num_tags": 3,
   "organization": {
     "approval_status": "approved",
     "created": "2024-11-25T12:11:38.292601",
     "description": "",
     "id": "079d46db-32df-4b48-91f3-0a8bc8f69559",
     "image_url": "",
     "is_organization": true,
     "name": "no-organization",
     "state": "active",
     "title": "No Organization",
     "type": "organization"
   },
   "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559",
   "private": false,
   "relationships_as_object": [],
   "relationships_as_subject": [],
-  "resources": [],
+  "resources": [
+    {
+      "cache_last_updated": null,
+      "cache_url": null,
+      "created": "2024-12-02T18:38:42",
+      "data": [
+        "dcterms:title",
+        "dcterms:accessRights",
+        "dcterms:creator",
+        "dcterms:description",
+        "dcterms:issued",
+        "dcterms:language",
+        "dcterms:identifier",
+        "dcat:theme",
+        "dcterms:type",
+        "dcat:keyword",
+        "dcat:landingPage",
+        "dcterms:hasVersion",
+        "dcterms:format",
+        "mls:task",
+        "datacite:isDescribedBy"
+      ],
+      "description": "The json representation of the dataset with its distributions based on DCAT.",
+      "format": "JSON",
+      "hash": "",
+      "id": "63200a57-6e8f-4b70-af23-677b73190832",
+      "last_modified": "2024-12-02T18:21:50.671435",
+      "metadata_modified": "2024-12-02T18:21:50.681095",
+      "mimetype": "application/json",
+      "mimetype_inner": null,
+      "name": "Original Metadata",
+      "package_id": "255f44a3-b33c-4f5b-b6ce-dfecd594e8c0",
+      "position": 0,
+      "resource_type": null,
+      "size": 1101,
+      "state": "active",
+      "url": ".../resource/63200a57-6e8f-4b70-af23-677b73190832/download/metadata.json",
+      "url_type": "upload"
+    }
+  ],
   "services_used_list": "",
   "state": "active",
   "tags": [
     {
       "display_name": "3D Reconstruction",
       "id": "4473ef66-a566-4165-a64e-4f56362435fb",
       "name": "3D Reconstruction",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "Human Pose",
       "id": "bf30d3ef-f597-4fca-987c-5aa8077462bc",
       "name": "Human Pose",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "Motion Capture",
       "id": "62f7b8b6-7259-4e61-909a-4a4dc27368c9",
       "name": "Motion Capture",
       "state": "active",
       "vocabulary_id": null
     }
   ],
   "title": "MotionCLIP: Exposing Human Motion Generation to CLIP Space",
   "type": "dataset",
   "version": ""
 }
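The newly added "Original Metadata" resource is the DCAT-based JSON representation of the dataset, and its download URL is truncated in the record above. A minimal sketch for fetching it without hard-coding that URL, again assuming the standard CKAN Action API under /api/3/action/ (the exact shape of metadata.json is not shown in this log, so the script only pretty-prints whatever it receives):

# Minimal sketch: resolve and download the "Original Metadata" resource via
# resource_show instead of relying on the truncated URL shown in the diff.
import json
from urllib.request import urlopen

BASE = "https://service.tib.eu/ldmservice"
RESOURCE_ID = "63200a57-6e8f-4b70-af23-677b73190832"  # id of "Original Metadata"

with urlopen(f"{BASE}/api/3/action/resource_show?id={RESOURCE_ID}") as resp:
    resource = json.load(resp)["result"]

# resource["url"] is the canonical download link for metadata.json (~1.1 kB).
with urlopen(resource["url"]) as resp:
    dcat_record = json.load(resp)

# The resource's "data" list names the DCAT/DCTERMS properties it carries
# (dcterms:title, dcterms:identifier, dcat:landingPage, ...).
print(json.dumps(dcat_record, indent=2))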