{"created":"2023-07-27T06:25:56.235740+00:00","id":9442,"links":{},"metadata":{"_buckets":{"deposit":"5a41d7bf-7621-4f3f-8f43-16dd4e6ff9cd"},"_deposit":{"created_by":3,"id":"9442","owners":[3],"pid":{"revision_id":0,"type":"depid","value":"9442"},"status":"published"},"_oai":{"id":"oai:kanazawa-u.repo.nii.ac.jp:00009442","sets":["4193:4194:4195"]},"author_link":["13596","122","13595","9888"],"item_4_biblio_info_8":{"attribute_name":"書誌情報","attribute_value_mlt":[{"bibliographicIssueDates":{"bibliographicIssueDate":"2014-11-24","bibliographicIssueDateType":"Issued"},"bibliographic_titles":[{"bibliographic_title":"SIGGRAPH Asia 2014 Emerging Technologies, SA 2014"}]}]},"item_4_creator_33":{"attribute_name":"著者別表示","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"秋田, 純一"}],"nameIdentifiers":[{},{},{},{}]}]},"item_4_description_21":{"attribute_name":"抄録","attribute_value_mlt":[{"subitem_description":"Although we obtain a lot of information in our environment via the visual modality, we also obtain rich information via the non-visual modality. In the mechanism how we perceive our environment, we use not only the sensor information, but also \"how it changes according to how we act.\" For example, we obtain the haptic information from the haptic sensor on our finger, and when we move our finger along to the surface of the touching object, the haptic information changes according to the finger motion, and we \"perceive\" the whole shape of the object by executing the action-and-sensing process. In other words, we have a high ability to \"integrate\" the relation of our body's action and its related sensing data, so as to improve the accuracy of sensor in our body. Based on this idea, we developed a simple perception aid device with user's explorer action, to perceive the object at a distance, which has a linked range sensor and haptic actuator, which we name \"FutureBody-Finger.\" The distance sensor measures the distance to the object (20-80[cm]), and it is converted to the angle of lever attached at the servo motor (0-60[deg]). The user holds this device in his hand with attaching his index finger on the device's lever. For the long distance to the object, the lever leans to the front, and the user feels nothing. On the other hand, for the short distance to the object, the lever stands vertically, and the user feels the existence of the object. 
Although the device measures the distance to only a single point on the object, as the user \"explores\" his or her surroundings, the user obtains much richer distance information about the surrounding objects and can finally perceive the shape of the whole object.","subitem_description_type":"Abstract"}]},"item_4_description_22":{"attribute_name":"内容記述","attribute_value_mlt":[{"subitem_description":"SIGGRAPH Asia 2014 Emerging Technologies, SA 2014; Shenzhen; China; 3 December 2014 through 6 December 2014; Code 109455","subitem_description_type":"Other"}]},"item_4_description_5":{"attribute_name":"提供者所属","attribute_value_mlt":[{"subitem_description":"金沢大学融合研究域融合科学系","subitem_description_type":"Other"}]},"item_4_publisher_17":{"attribute_name":"出版者","attribute_value_mlt":[{"subitem_publisher":"Association for Computing Machinery, Inc"}]},"item_4_relation_12":{"attribute_name":"DOI","attribute_value_mlt":[{"subitem_relation_type":"isVersionOf","subitem_relation_type_id":{"subitem_relation_type_id_text":"10.1145/2669047.2669058","subitem_relation_type_select":"DOI"}}]},"item_4_version_type_25":{"attribute_name":"著者版フラグ","attribute_value_mlt":[{"subitem_version_resource":"http://purl.org/coar/version/c_ab4af688f83e57aa","subitem_version_type":"AM"}]},"item_creator":{"attribute_name":"著者","attribute_type":"creator","attribute_value_mlt":[{"creatorNames":[{"creatorName":"Akita, Junichi"}],"nameIdentifiers":[{},{},{},{}]},{"creatorNames":[{"creatorName":"Ono, Tetsuo"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Ito, Kiyohide"}],"nameIdentifiers":[{}]},{"creatorNames":[{"creatorName":"Okamoto, Makoto"}],"nameIdentifiers":[{}]}]},"item_files":{"attribute_name":"ファイル情報","attribute_type":"file","attribute_value_mlt":[{"accessrole":"open_date","date":[{"dateType":"Available","dateValue":"2017-10-03"}],"displaytype":"detail","filename":"TE-PR-AKITA-J-2014.pdf","filesize":[{"value":"6.5 MB"}],"format":"application/pdf","licensetype":"license_11","mimetype":"application/pdf","url":{"label":"TE-PR-AKITA-J-2014.pdf","url":"https://kanazawa-u.repo.nii.ac.jp/record/9442/files/TE-PR-AKITA-J-2014.pdf"},"version_id":"f01f3308-2a7d-4683-ba21-64d25d7593a4"}]},"item_language":{"attribute_name":"言語","attribute_value_mlt":[{"subitem_language":"eng"}]},"item_resource_type":{"attribute_name":"資源タイプ","attribute_value_mlt":[{"resourcetype":"journal article","resourceuri":"http://purl.org/coar/resource_type/c_6501"}]},"item_title":"Touch at a distance: Simple perception aid device with user's explorer action","item_titles":{"attribute_name":"タイトル","attribute_value_mlt":[{"subitem_title":"Touch at a distance: Simple perception aid device with user's explorer action"}]},"item_type_id":"4","owner":"3","path":["4195"],"pubdate":{"attribute_name":"公開日","attribute_value":"2017-10-03"},"publish_date":"2017-10-03","publish_status":"0","recid":"9442","relation_version_is_last":true,"title":["Touch at a distance: Simple perception aid device with user's explorer action"],"weko_creator_id":"3","weko_shared_id":3},"updated":"2023-07-27T10:19:17.024083+00:00"}
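
The abstract describes a conversion from measured distance (20-80 cm) to lever angle (0-60 deg), with close objects raising the lever and distant objects letting it lean forward. Below is a minimal sketch of that mapping, assuming a purely linear conversion with clamping at the sensor limits; the function name, the linearity, and the exact endpoint assignment are illustrative assumptions, not details taken from the paper.

def distance_to_lever_angle(distance_cm: float) -> float:
    """Map a measured distance in [20, 80] cm to a lever angle in [0, 60] deg.

    Assumed convention: 20 cm (object close) -> 60 deg (lever upright,
    felt by the finger); 80 cm or more (object far) -> 0 deg (lever
    leaning forward, nothing felt). Out-of-range inputs are clamped.
    """
    d = min(max(distance_cm, 20.0), 80.0)       # clamp to the sensor's 20-80 cm range
    return 60.0 * (80.0 - d) / (80.0 - 20.0)    # linear map: 20 cm -> 60 deg, 80 cm -> 0 deg


# Example: an object at 50 cm would raise the lever halfway, to 30 degrees.
print(distance_to_lever_angle(50.0))  # 30.0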