Changes
On December 16, 2024 at 7:26:21 PM UTC, admin:
- Changed value of field doi_status to True in SALYPATH: A DEEP-BASED ARCHITECTURE FOR VISUAL ATTENTION PREDICTION
- Changed value of field doi_date_published to 2024-12-16 in SALYPATH: A DEEP-BASED ARCHITECTURE FOR VISUAL ATTENTION PREDICTION
- Added resource Original Metadata to SALYPATH: A DEEP-BASED ARCHITECTURE FOR VISUAL ATTENTION PREDICTION
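On this portal the DOI fields are normally set by the service itself, but the same two field updates can be expressed against CKAN's standard Action API. Below is a minimal sketch, not the portal's actual workflow, assuming the Action API is reachable under the domain recorded in the metadata (https://service.tib.eu/ldmservice) and that the caller holds an API token with edit rights; API_KEY is a hypothetical placeholder.

# Minimal sketch: applying the doi_status / doi_date_published changes from this
# revision via CKAN's package_patch action. Values are taken from the change log;
# the API token is a placeholder and a real one with edit rights would be needed.
import requests

CKAN_URL = "https://service.tib.eu/ldmservice"   # "domain" field of the record
API_KEY = "..."                                  # hypothetical placeholder

payload = {
    "id": "63eec664-5fed-46ba-9fa0-f7d998d504ec",  # package id from the record
    "doi_status": True,
    "doi_date_published": "2024-12-16",
}

response = requests.post(
    f"{CKAN_URL}/api/3/action/package_patch",
    json=payload,
    headers={"Authorization": API_KEY},
    timeout=30,
)
response.raise_for_status()
print(response.json()["success"])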
Dataset record before and after the revision (unified diff):

 {
   "access_rights": "",
   "author": "Mohamed A. KERKOURI",
   "author_email": "",
   "citation": [],
   "creator_user_id": "17755db4-395a-4b3b-ac09-e8e3484ca700",
   "defined_in": "",
   "doi": "10.57702/amx6uipc",
-  "doi_date_published": null,
+  "doi_date_published": "2024-12-16",
   "doi_publisher": "TIB",
-  "doi_status": false,
+  "doi_status": true,
   "domain": "https://service.tib.eu/ldmservice",
   "extra_authors": [
     {
       "extra_author": "Marouane TLIBA",
       "orcid": ""
     },
     {
       "extra_author": "Aladine CHETOUANI",
       "orcid": ""
     },
     {
       "extra_author": "Rachid HARBA",
       "orcid": ""
     }
   ],
   "groups": [
     {
       "description": "",
       "display_name": "Scanpath Prediction",
       "id": "b64bbd5c-89c7-49e0-bf80-11435d0d9dda",
       "image_display_url": "",
       "name": "scanpath-prediction",
       "title": "Scanpath Prediction"
     },
     {
       "description": "",
       "display_name": "Visual Attention",
       "id": "403b0e97-dd2b-48f8-9846-7f2e0affb8e4",
       "image_display_url": "",
       "name": "visual-attention",
       "title": "Visual Attention"
     }
   ],
   "id": "63eec664-5fed-46ba-9fa0-f7d998d504ec",
   "isopen": false,
   "landing_page": "https://github.com/kmamine/",
   "license_title": null,
   "link_orkg": "",
   "metadata_created": "2024-12-16T19:26:19.861516",
-  "metadata_modified": "2024-12-16T19:26:19.861523",
+  "metadata_modified": "2024-12-16T19:26:20.339208",
   "name": "salypath--a-deep-based-architecture-for-visual-attention-prediction",
   "notes": "Human vision is naturally more attracted by some regions within their field of view than others. This intrinsic selectivity mechanism, so-called visual attention, is in\ufb02uenced by both high- and low-level factors; such as the global environment (illumination, background texture, etc.), stimulus characteristics (color, intensity, orientation, etc.), and some prior visual information.",
-  "num_resources": 0,
+  "num_resources": 1,
   "num_tags": 4,
   "organization": {
     "approval_status": "approved",
     "created": "2024-11-25T12:11:38.292601",
     "description": "",
     "id": "079d46db-32df-4b48-91f3-0a8bc8f69559",
     "image_url": "",
     "is_organization": true,
     "name": "no-organization",
     "state": "active",
     "title": "No Organization",
     "type": "organization"
   },
   "owner_org": "079d46db-32df-4b48-91f3-0a8bc8f69559",
   "private": false,
   "relationships_as_object": [],
   "relationships_as_subject": [],
-  "resources": [],
+  "resources": [
+    {
+      "cache_last_updated": null,
+      "cache_url": null,
+      "created": "2024-12-16T18:25:41",
+      "data": [
+        "dcterms:title",
+        "dcterms:accessRights",
+        "dcterms:creator",
+        "dcterms:description",
+        "dcterms:issued",
+        "dcterms:language",
+        "dcterms:identifier",
+        "dcat:theme",
+        "dcterms:type",
+        "dcat:keyword",
+        "dcat:landingPage",
+        "dcterms:hasVersion",
+        "dcterms:format",
+        "mls:task"
+      ],
+      "description": "The json representation of the dataset with its distributions based on DCAT.",
+      "format": "JSON",
+      "hash": "",
+      "id": "8cf48205-d2b5-4aa0-bfcd-761da171de02",
+      "last_modified": "2024-12-16T19:26:20.331718",
+      "metadata_modified": "2024-12-16T19:26:20.342084",
+      "mimetype": "application/json",
+      "mimetype_inner": null,
+      "name": "Original Metadata",
+      "package_id": "63eec664-5fed-46ba-9fa0-f7d998d504ec",
+      "position": 0,
+      "resource_type": null,
+      "size": 1044,
+      "state": "active",
+      "url":
+        resource/8cf48205-d2b5-4aa0-bfcd-761da171de02/download/metadata.json",
+      "url_type": "upload"
+    }
+  ],
   "services_used_list": "",
   "state": "active",
   "tags": [
     {
       "display_name": "deep learning",
       "id": "19e41883-3799-4184-9e0e-26c95795b119",
       "name": "deep learning",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "saliency",
       "id": "e8c29766-f718-4bd8-87ca-f86bd741cba7",
       "name": "saliency",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "scanpath",
       "id": "daeedfad-28a4-477b-b95b-4cfa815e51d9",
       "name": "scanpath",
       "state": "active",
       "vocabulary_id": null
     },
     {
       "display_name": "visual attention",
       "id": "bd5f6475-4fb3-4c8f-8a3b-c51bc58bc8b4",
       "name": "visual attention",
       "state": "active",
       "vocabulary_id": null
     }
   ],
   "title": "SALYPATH: A DEEP-BASED ARCHITECTURE FOR VISUAL ATTENTION PREDICTION",
   "type": "dataset",
   "version": ""
 }
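After a revision like this, the record can be read back through CKAN's package_show action to confirm the new field values and the added resource. The sketch below assumes anonymous read access to the Action API, which is the CKAN default for public datasets; field names and identifiers are taken from the log above.

# Minimal verification sketch: fetch the dataset record and print the fields
# touched by this revision, plus the newly added "Original Metadata" resource.
import requests

CKAN_URL = "https://service.tib.eu/ldmservice"

record = requests.get(
    f"{CKAN_URL}/api/3/action/package_show",
    params={"id": "salypath--a-deep-based-architecture-for-visual-attention-prediction"},
    timeout=30,
).json()["result"]

print(record["doi_status"], record["doi_date_published"], record["num_resources"])
for res in record["resources"]:
    # expected after this revision: "Original Metadata", format JSON
    print(res["name"], res["format"], res["url"])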