From b09f8c25599c040c0717090f870020527b1c44cb Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Thu, 13 Apr 2023 12:47:28 +0900 Subject: [PATCH 01/60] [database_talker] add database_talker Co-authored-by: Kei Okada --- database_talker/.gitignore | 2 + database_talker/.vscode/c_cpp_properties.json | 67 ++++ database_talker/.vscode/settings.json | 12 + database_talker/CMakeLists.txt | 17 + database_talker/README.md | 34 ++ database_talker/launch/demo.launch | 59 +++ database_talker/package.xml | 19 + database_talker/requirements.txt | 2 + database_talker/rosinstall | 12 + database_talker/scripts/hoge.py | 344 ++++++++++++++++++ 10 files changed, 568 insertions(+) create mode 100644 database_talker/.gitignore create mode 100644 database_talker/.vscode/c_cpp_properties.json create mode 100644 database_talker/.vscode/settings.json create mode 100644 database_talker/CMakeLists.txt create mode 100644 database_talker/README.md create mode 100644 database_talker/launch/demo.launch create mode 100644 database_talker/package.xml create mode 100644 database_talker/requirements.txt create mode 100644 database_talker/rosinstall create mode 100644 database_talker/scripts/hoge.py diff --git a/database_talker/.gitignore b/database_talker/.gitignore new file mode 100644 index 0000000000..a6ed6d6c16 --- /dev/null +++ b/database_talker/.gitignore @@ -0,0 +1,2 @@ +auth/* +test_db/* diff --git a/database_talker/.vscode/c_cpp_properties.json b/database_talker/.vscode/c_cpp_properties.json new file mode 100644 index 0000000000..d5820d2fdf --- /dev/null +++ b/database_talker/.vscode/c_cpp_properties.json @@ -0,0 +1,67 @@ +{ + "configurations": [ + { + "browse": { + "databaseFilename": "${default}", + "limitSymbolsToIncludedHeaders": false + }, + "includePath": [ + "/home/sktometometo/ros/ws_jsk_fetch/devel/include/**", + "/opt/ros/melodic/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/amcl/include/**", + 
"/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_common/audio_video_recorder/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/base_local_planner/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/BehaviorTree.ROS/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/carrot_planner/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/clear_costmap_recovery/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/complex_recovery/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/costmap_2d/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_demos/jsk_2015_06_hrp_drc/drc_task_common/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/dwa_local_planner/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/euslisp/Euslisp/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/fetch_ros/fetch_depth_layer/include/**", + "/opt/ros/melodic/share/fetch_gazebo/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/fetch_ros/fetch_ikfast_plugin/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/fetch_open_auto_dock/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/global_planner/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/imagesift/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/jsk_pcl_ros/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/jsk_pcl_ros_utils/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/jsk_perception/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/jsk_recognition_utils/include/**", + 
"/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/jsk_robot_startup/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_common/jsk_topic_tools/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_3rdparty/jsk_ros_patch/laser_filters_jsk_patch/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/map_server/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/strands-project/mongodb_store/mongodb_store/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/move_base/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/move_slow_and_clear/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/nav_core/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/navfn/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_pr2_robot/pr2_base_trajectory_action/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/IntelRealSense/realsense-ros/realsense2_camera/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/resized_image_transport/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/mikeferguson/robot_calibration/robot_calibration/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/robot_controllers/robot_controllers/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/robot_controllers/robot_controllers_interface/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_common/ros_lock/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_roseus/roseus_bt/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-drivers/rosserial/rosserial_server/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-drivers/rosserial/rosserial_test/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/rotate_recovery/include/**", 
+ "/home/sktometometo/ros/ws_jsk_fetch/src/ros-drivers/audio_common/sound_play/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/speak_and_wait_recovery/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/trigger_behavior_recovery/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/update_move_base_parameter_recovery/include/**", + "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/voxel_grid/include/**", + "/usr/include/**" + ], + "name": "ROS", + "intelliSenseMode": "gcc-x64", + "compilerPath": "/usr/bin/gcc", + "cStandard": "gnu11", + "cppStandard": "c++14" + } + ], + "version": 4 +} \ No newline at end of file diff --git a/database_talker/.vscode/settings.json b/database_talker/.vscode/settings.json new file mode 100644 index 0000000000..b4506c8de7 --- /dev/null +++ b/database_talker/.vscode/settings.json @@ -0,0 +1,12 @@ +{ + "python.autoComplete.extraPaths": [ + "/home/sktometometo/ros/ws_jsk_fetch/devel/lib/python2.7/dist-packages", + "/opt/ros/melodic/lib/python2.7/dist-packages", + "/home/sktometometo/raisimlib/install/lib" + ], + "python.analysis.extraPaths": [ + "/home/sktometometo/ros/ws_jsk_fetch/devel/lib/python2.7/dist-packages", + "/opt/ros/melodic/lib/python2.7/dist-packages", + "/home/sktometometo/raisimlib/install/lib" + ] +} diff --git a/database_talker/CMakeLists.txt b/database_talker/CMakeLists.txt new file mode 100644 index 0000000000..ec1c558746 --- /dev/null +++ b/database_talker/CMakeLists.txt @@ -0,0 +1,17 @@ +cmake_minimum_required(VERSION 3.0.2) +project(database_talker) + +find_package(catkin REQUIRED COMPONENTS catkin_virtualenv) + +catkin_generate_virtualenv( + PYTHON_INTERPRETER python3 + CHECK_VENV FALSE +) + +catkin_package( +) + +catkin_install_python(PROGRAMS + scripts/hoge.py + DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} +) diff --git a/database_talker/README.md 
b/database_talker/README.md new file mode 100644 index 0000000000..1f7b39e3c1 --- /dev/null +++ b/database_talker/README.md @@ -0,0 +1,34 @@ +# hoge.py + +What is this? +## Requirements + +See `requirements.txt` for python requirements. + +For ROS dependency. + +- `google_chat_ros` in `jsk_3rdparty` with [this improvement](https://github.com/jsk-ros-pkg/jsk_3rdparty/pull/451) +- `dialogflow_client` in `dialogflow_task_executive` package in `jsk_3rdparty` with [this improvement](https://github.com/jsk-ros-pkg/jsk_3rdparty/pull/451) +- `mongodb_store` with https://github.com/strands-project/mongodb_store/pull/282 +- CLIP VQA ros node introduced with https://github.com/jsk-ros-pkg/jsk_recognition/pull/2730. +- `ros_google_cloud_language` package in `jsk_3rdparty` + +## How to use + +1. Setup google chat ros with Cloud Pub/Sub + 1. prepare `credential_json` and `project_id` and `subscription_id` +2. Setup dialogflow + 1. prepare `credential_json` and `project_id` +3. Setup mongodb_store + 1. Create database with mondodb +4. Setup CLIP VQA node + 1. Make docker model + 2. Run ROS Interface node +5. Setup google cloud natural language + 1. 
Prepare `credential_json` + +And run + +```bash +roslaunch database_talker demo.launch +``` diff --git a/database_talker/launch/demo.launch b/database_talker/launch/demo.launch new file mode 100644 index 0000000000..9cad704b2b --- /dev/null +++ b/database_talker/launch/demo.launch @@ -0,0 +1,59 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/database_talker/package.xml b/database_talker/package.xml new file mode 100644 index 0000000000..8f56000d06 --- /dev/null +++ b/database_talker/package.xml @@ -0,0 +1,19 @@ + + + database_talker + 0.0.0 + The database_talker package + + sktometometo + + TODO + + catkin + catkin_virtualenv + + + + requirements.txt + + diff --git a/database_talker/requirements.txt b/database_talker/requirements.txt new file mode 100644 index 0000000000..1e0be444af --- /dev/null +++ b/database_talker/requirements.txt @@ -0,0 +1,2 @@ +bson +pymongo diff --git a/database_talker/rosinstall b/database_talker/rosinstall new file mode 100644 index 0000000000..4963918407 --- /dev/null +++ b/database_talker/rosinstall @@ -0,0 +1,12 @@ +- git: + local-name: jsk-ros-pkg/jsk_3rdparty + uri: https://github.com/k-okada/jsk_3rdparty.git + version: add_more_functions +- git: + local-name: jsk-ros-pkg/jsk_recognition + uri: https://github.com/mqcmd196/jsk_recognition.git + version: PR/large-scale-vil +- git: + local-name: strands-project/mongodb_store + uri: https://github.com/k-okada/mongodb_store.git + version: patch-1 diff --git a/database_talker/scripts/hoge.py b/database_talker/scripts/hoge.py new file mode 100644 index 0000000000..e052e6f658 --- /dev/null +++ b/database_talker/scripts/hoge.py @@ -0,0 +1,344 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import copy +import datetime +import difflib +import json +import os +import shutil +import tempfile + +import actionlib +import cv2 +import rospkg +import rospy +from bson import json_util +from dateutil import tz + +JST = 
tz.gettz('Asia/Tokyo') + +from cv_bridge import CvBridge + +bridge = CvBridge() + +from googletrans import Translator + +translator = Translator() + +from dialogflow_task_executive.msg import (DialogTextAction, + DialogTextActionResult, + DialogTextGoal) +from google_chat_ros.msg import (Card, Image, MessageEvent, Section, + SendMessageAction, SendMessageGoal, + WidgetMarkup) +from jsk_recognition_msgs.msg import (ClassificationTaskAction, + ClassificationTaskGoal) +from mongodb_store.util import deserialise_message +from mongodb_store_msgs.msg import StringPair, StringPairList +from mongodb_store_msgs.srv import MongoQueryMsg, MongoQueryMsgRequest +from ros_google_cloud_language.msg import AnalyzeTextAction, AnalyzeTextGoal + + +class MessageListener(object): + + def __init__(self): + rospy.loginfo("wait for '/google_chat_ros/send'") + self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', + SendMessageAction) + self.chat_ros_ac.wait_for_server() + #self.pub = rospy.Publisher('/google_chat_ros/send/goal', SendMessageActionGoal, queue_size=1) + + rospy.loginfo("wait for '/message_store/query_messages'") + rospy.wait_for_service('/message_store/query_messages') + self.query = rospy.ServiceProxy('/message_store/query_messages', + MongoQueryMsg) + + rospy.loginfo("wait for '/classification/clip_server'") + self.classification_ac = actionlib.SimpleActionClient( + '/classification/clip_server', ClassificationTaskAction) + self.classification_ac.wait_for_server() + + ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py + # rospy.loginfo("wait for '/dialogflow_client/text_action'") + # self.dialogflow_ac = actionlib.SimpleActionClient('/dialogflow_client/text_action' , DialogTextAction) + # self.dialogflow_ac.wait_for_server() + + rospy.loginfo("wait for '/analyze_text/text'") + self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text', + AnalyzeTextAction) + 
self.analyze_text_ac.wait_for_server() + + # rospy.loginfo("subscribe '/google_chat_ros/message_activity'") + # self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) + rospy.loginfo("subscribe '/dialogflow_client/text_action/result'") + self.sub = rospy.Subscriber('/dialogflow_client/text_action/result', + DialogTextActionResult, self.cb) + + rospy.loginfo("all done, ready") + + def make_reply(self, query): + rospy.logwarn("Run make_reply({})".format(query)) + # look for images + try: + # get chat message + results, chat_msgs = self.query_chat(query, datetime.datetime.now(JST)) + if len(results) == 0 and len(chat_msgs.metas) > 0: + meta = json.loads(chat_msgs.metas[-1].pairs[0].second) + results, chat_msgs = self.query_chat( + query, + datetime.datetime.fromtimestamp(meta['timestamp'] // 1000000000, + JST)) + # sort based on similarity with 'query' + chat_msgs_sorted = sorted(results, + key=lambda x: x['similarity'], + reverse=True) + + if len(chat_msgs_sorted) == 0: + rospy.logwarn("no chat message was found") + return + else: + # query images that was taken when chat_msgs are stored + msg = chat_msgs_sorted[0]['msg'] + meta = chat_msgs_sorted[0]['meta'] + text = chat_msgs_sorted[0]['message'] + timestamp = chat_msgs_sorted[0]['timestamp'] + #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second) + # text = msg.message.argument_text or msg.message.text + # timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + rospy.logwarn( + "Found message '{}' at {}, corresponds to query {}".format( + text, timestamp.strftime('%Y-%m-%d %H:%M:%S'), query)) + + start_time = timestamp - datetime.timedelta(minutes=30) + end_time = timestamp + datetime.timedelta(minutes=30) + results = self.query_images_and_classify(query=query, + start_time=start_time, + end_time=end_time) + + end_time = results[-1]['timestamp'] + # sort + results = sorted(results, key=lambda x: x['similarities'], reverse=True) + 
rospy.loginfo("Probabilities of all images {}".format( + list(map(lambda x: (x['label'], x['similarities']), results)))) + best_result = results[0] + + # if probability is too low, try again + while len(results) > 0 and results[0]['similarities'] < 0.25: + start_time = end_time - datetime.timedelta(hours=24) + timestamp = datetime.datetime.now(JST) + results = self.query_images_and_classify(query=query, + start_time=start_time, + end_time=end_time, + limit=300) + if len(results) > 0: + end_time = results[-1]['timestamp'] + # sort + results = sorted(results, + key=lambda x: x['similarities'], + reverse=True) + #rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results)))) + if len(results) > 0 and results[0]['similarities'] > best_result[ + 'similarities']: + best_result = results[0] + + rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format( + best_result['label'], best_result['similarities'], + best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'))) + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + image = bridge.compressed_imgmsg_to_cv2(best_result['image']) + cv2.putText( + image, "{} ({:.2f}) {}".format( + best_result['label'], best_result['similarities'], + best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), (10, 20), + cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 8, 1) + cv2.putText( + image, "{} ({:.2f}) {}".format( + best_result['label'], best_result['similarities'], + best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), (10, 20), + cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 2, 1) + cv2.imwrite(filename, image) + rospy.logwarn("save images to {}".format(filename)) + + # pubish as card + self.publish_google_chat_card( + translator.translate('We saw ' + query, dest="ja").text, filename) + + except Exception as e: + rospy.logerr("Query failed {}".format(e)) + + def query_chat(self, query, end_time, limit=30): + rospy.logwarn("Query chat until 
{}".format(end_time)) + meta_query = {'inserted_at': {"$lt": end_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, + json.dumps(meta_query, + default=json_util.default)),) + chat_msgs = self.query( + database='jsk_robot_lifelog', + collection='strelka', + # type = 'google_chat_ros/MessageEvent', + type='dialogflow_task_executive/DialogTextActionResult', + single=False, + limit=limit, + meta_query=StringPairList(meta_tuple), + sort_query=StringPairList([StringPair('_meta.inserted_at', '-1')])) + + # show chats + results = [] + for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): + msg = deserialise_message(msg) + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp( + meta['timestamp'] // 1000000000, JST) + # message = msg.message.argument_text or msg.message.text + message = msg.result.response.query + result = { + 'message': message, + 'timestamp': timestamp, + 'similarity': difflib.SequenceMatcher(None, query, message).ratio(), + 'msg': msg, + 'meta': meta + } + if msg.result.response.action in ['make_reply', 'input.unknown']: + rospy.logwarn( + "Found chat messages {} at {} but skipping (action:{})".format( + result['message'], + result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), + msg.result.response.action)) + else: + results.append(result) + rospy.logwarn("Found chat messages {} at {} ({}:{:.2f})".format( + result['message'], + result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, + result['similarity'])) + + return results, chat_msgs + + def query_images_and_classify(self, query, start_time, end_time, limit=30): + rospy.logwarn("Query images from {} to {}".format(start_time, end_time)) + meta_query = { + 'input_topic': '/spot/camera/hand_color/image/compressed/throttled', + 'inserted_at': { + "$gt": start_time, + "$lt": end_time + } + } + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, + json.dumps(meta_query, + default=json_util.default)),) + msgs = 
self.query(database='jsk_robot_lifelog', + collection='strelka', + type='sensor_msgs/CompressedImage', + single=False, + limit=limit, + meta_query=StringPairList(meta_tuple), + sort_query=StringPairList( + [StringPair('_meta.inserted_at', '-1')])) + + rospy.loginfo("Found {} images".format(len(msgs.messages))) + if len(msgs.messages) == 0: + rospy.logwarn("no images was found") + + # get contents of images + results = [] + for msg, meta in zip(msgs.messages, msgs.metas): + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp( + meta['timestamp'] // 1000000000, JST) + # rospy.logwarn("Found images at {}".format(timestamp)) + + goal = ClassificationTaskGoal() + goal.compressed_image = deserialise_message(msg) + goal.queries = [query] + self.classification_ac.send_goal(goal) + self.classification_ac.wait_for_result() + result = self.classification_ac.get_result() + idx = result.result.label_names.index(query) + #similarities = result.result.probabilities + similarities = result.result.label_proba + # rospy.logwarn(" ... {}".format(list(zip(result.result.label_names, map(lambda x: "{:.2f}".format(x), similarities))))) + rospy.logwarn("Found images at {} .. 
{}".format( + timestamp, + list( + zip(result.result.label_names, + map(lambda x: "{:.2f}".format(x), similarities))))) + results.append({ + 'label': result.result.label_names[idx], + 'probabilities': result.result.probabilities[idx], + 'similarities': result.result.label_proba[idx], + 'image': goal.compressed_image, + 'timestamp': timestamp + }) + + # we do not sorty by probabilites, becasue we also need oldest timestamp + return results + + def publish_google_chat_card(self, text, filename=None): + goal = SendMessageGoal() + goal.text = text + if filename: + goal.cards = [ + Card(sections=[ + Section(widgets=[WidgetMarkup(image=Image(localpath=filename))]) + ]) + ] + goal.space = 'spaces/AAAAoTwLBL0' + rospy.logwarn("send {} to {}".format(goal.text, goal.space)) + self.chat_ros_ac.send_goal_and_wait(goal, + execute_timeout=rospy.Duration(0.10)) + + def text_to_salience(self, text): + goal = AnalyzeTextGoal() + goal.text = text + self.analyze_text_ac.send_goal(goal) + self.analyze_text_ac.wait_for_result() + entity = self.analyze_text_ac.get_result() + if len(entity.entities) > 0: + return entity.entities[0].name + else: + return text + + def cb(self, msg): + if msg._type == 'google_chat_ros.msg/MessageEvent': + text = message.message.argument_text.lstrip( + ) or message.message.text.lstrip() + message = self.text_to_salience(text) + rospy.logwarn("Received chat message '{}'".format(text)) + + # ask dialogflow for intent + goal = DialogTextGoal() + goal.query = text + self.dialogflow_ac.send_goal(goal) + self.dialogflow_ac.wait_for_result() + result = self.dialogflow_ac.get_result() + elif msg._type == 'dialogflow_task_executive/DialogTextActionResult': + result = msg.result + else: + rospy.logerr("Unknown message type {}".format(msg._type)) + return + + try: + rospy.logwarn("received dialogflow query '{}'".format( + result.response.query)) + rospy.logwarn("received dialogflow action '{}'".format( + result.response.action)) + if result.response.action == 
'input.unknown': + self.publish_google_chat_card("🤖") + elif result.response.action == 'make_reply': + self.make_reply( + self.text_to_salience( + translator.translate(result.response.query, dest="en").text)) + else: + self.publish_google_chat_card(result.response.response) + + except Exception as e: + rospy.logerr("Callback failed {}".format(e)) + self.publish_google_chat_card("💀 {}".format(e)) + + +if __name__ == '__main__': + rospy.init_node('test', anonymous=True) + ml = MessageListener() + #ml.cb2(0) + #ml.cb2('chair') + rospy.spin() From 6389a4ac2c120f48d00cc677b6515d3827b3ec21 Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Thu, 13 Apr 2023 12:50:56 +0900 Subject: [PATCH 02/60] [database_talker] update gitignore --- database_talker/.gitignore | 1 + database_talker/.vscode/c_cpp_properties.json | 67 ------------------- database_talker/.vscode/settings.json | 12 ---- 3 files changed, 1 insertion(+), 79 deletions(-) delete mode 100644 database_talker/.vscode/c_cpp_properties.json delete mode 100644 database_talker/.vscode/settings.json diff --git a/database_talker/.gitignore b/database_talker/.gitignore index a6ed6d6c16..d584a46726 100644 --- a/database_talker/.gitignore +++ b/database_talker/.gitignore @@ -1,2 +1,3 @@ auth/* test_db/* +.vscode/* diff --git a/database_talker/.vscode/c_cpp_properties.json b/database_talker/.vscode/c_cpp_properties.json deleted file mode 100644 index d5820d2fdf..0000000000 --- a/database_talker/.vscode/c_cpp_properties.json +++ /dev/null @@ -1,67 +0,0 @@ -{ - "configurations": [ - { - "browse": { - "databaseFilename": "${default}", - "limitSymbolsToIncludedHeaders": false - }, - "includePath": [ - "/home/sktometometo/ros/ws_jsk_fetch/devel/include/**", - "/opt/ros/melodic/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/amcl/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_common/audio_video_recorder/include/**", - 
"/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/base_local_planner/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/BehaviorTree.ROS/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/carrot_planner/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/clear_costmap_recovery/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/complex_recovery/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/costmap_2d/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_demos/jsk_2015_06_hrp_drc/drc_task_common/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/dwa_local_planner/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/euslisp/Euslisp/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/fetch_ros/fetch_depth_layer/include/**", - "/opt/ros/melodic/share/fetch_gazebo/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/fetch_ros/fetch_ikfast_plugin/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/fetch_open_auto_dock/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/global_planner/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/imagesift/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/jsk_pcl_ros/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/jsk_pcl_ros_utils/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/jsk_perception/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/jsk_recognition_utils/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/jsk_robot_startup/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_common/jsk_topic_tools/include/**", - 
"/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_3rdparty/jsk_ros_patch/laser_filters_jsk_patch/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/map_server/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/strands-project/mongodb_store/mongodb_store/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/move_base/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/move_slow_and_clear/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/nav_core/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/navfn/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_pr2_robot/pr2_base_trajectory_action/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/IntelRealSense/realsense-ros/realsense2_camera/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_recognition/resized_image_transport/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/mikeferguson/robot_calibration/robot_calibration/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/robot_controllers/robot_controllers/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/fetchrobotics/robot_controllers/robot_controllers_interface/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_common/ros_lock/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_roseus/roseus_bt/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-drivers/rosserial/rosserial_server/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-drivers/rosserial/rosserial_test/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/rotate_recovery/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-drivers/audio_common/sound_play/include/**", - 
"/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/speak_and_wait_recovery/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/trigger_behavior_recovery/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/jsk-ros-pkg/jsk_robot/jsk_robot_common/update_move_base_parameter_recovery/include/**", - "/home/sktometometo/ros/ws_jsk_fetch/src/ros-planning/navigation/voxel_grid/include/**", - "/usr/include/**" - ], - "name": "ROS", - "intelliSenseMode": "gcc-x64", - "compilerPath": "/usr/bin/gcc", - "cStandard": "gnu11", - "cppStandard": "c++14" - } - ], - "version": 4 -} \ No newline at end of file diff --git a/database_talker/.vscode/settings.json b/database_talker/.vscode/settings.json deleted file mode 100644 index b4506c8de7..0000000000 --- a/database_talker/.vscode/settings.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "python.autoComplete.extraPaths": [ - "/home/sktometometo/ros/ws_jsk_fetch/devel/lib/python2.7/dist-packages", - "/opt/ros/melodic/lib/python2.7/dist-packages", - "/home/sktometometo/raisimlib/install/lib" - ], - "python.analysis.extraPaths": [ - "/home/sktometometo/ros/ws_jsk_fetch/devel/lib/python2.7/dist-packages", - "/opt/ros/melodic/lib/python2.7/dist-packages", - "/home/sktometometo/raisimlib/install/lib" - ] -} From 4612b0cd41408287a35f31e5cd52e26802acc662 Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Thu, 13 Apr 2023 12:51:36 +0900 Subject: [PATCH 03/60] [database_talker] add empty directories --- database_talker/auth/.keepme | 0 database_talker/test_db/.keepme | 0 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 database_talker/auth/.keepme create mode 100644 database_talker/test_db/.keepme diff --git a/database_talker/auth/.keepme b/database_talker/auth/.keepme new file mode 100644 index 0000000000..e69de29bb2 diff --git a/database_talker/test_db/.keepme b/database_talker/test_db/.keepme new file mode 100644 index 0000000000..e69de29bb2 From 
10ba553ec0f8f685a6143ae15e0cd9ad1883545f Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Thu, 13 Apr 2023 13:30:12 +0900 Subject: [PATCH 04/60] [database_talker] fix hoge.py --- database_talker/scripts/hoge.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/database_talker/scripts/hoge.py b/database_talker/scripts/hoge.py index e052e6f658..37fbb68be9 100644 --- a/database_talker/scripts/hoge.py +++ b/database_talker/scripts/hoge.py @@ -54,9 +54,9 @@ def __init__(self): self.query = rospy.ServiceProxy('/message_store/query_messages', MongoQueryMsg) - rospy.loginfo("wait for '/classification/clip_server'") + rospy.loginfo("wait for '/classification/inference_server'") self.classification_ac = actionlib.SimpleActionClient( - '/classification/clip_server', ClassificationTaskAction) + '/classification/inference_server', ClassificationTaskAction) self.classification_ac.wait_for_server() ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py From 5a56b74a177ca897bab15b35f4a699799195d7e6 Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Thu, 13 Apr 2023 13:30:34 +0900 Subject: [PATCH 05/60] [database_talker] fix bugs --- database_talker/launch/demo.launch | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/database_talker/launch/demo.launch b/database_talker/launch/demo.launch index 9cad704b2b..c1fddc9059 100644 --- a/database_talker/launch/demo.launch +++ b/database_talker/launch/demo.launch @@ -43,17 +43,20 @@ - - + + + pixel_format: yuyv + - + From 5b638ad5ec1b7a0f6a18a538d515ea8887c3d090 Mon Sep 17 00:00:00 2001 From: Naoto Tsukamoto Date: Fri, 14 Apr 2023 08:58:32 +0900 Subject: [PATCH 06/60] Modify hoge.py to work in Fetch1075 --- database_talker/scripts/hoge.py | 645 ++++++++++++++++---------------- 1 file changed, 331 insertions(+), 314 deletions(-) diff --git a/database_talker/scripts/hoge.py b/database_talker/scripts/hoge.py index 37fbb68be9..c6aead2542 100644 --- 
a/database_talker/scripts/hoge.py +++ b/database_talker/scripts/hoge.py @@ -1,344 +1,361 @@ #!/usr/bin/env python # -*- coding: utf-8 -*- + +import rospy + +import actionlib +from bson import json_util import copy +import cv2 import datetime import difflib import json import os +import random +import rospkg import shutil +import sys import tempfile +import time +import traceback -import actionlib -import cv2 -import rospkg -import rospy -from bson import json_util from dateutil import tz - JST = tz.gettz('Asia/Tokyo') from cv_bridge import CvBridge - bridge = CvBridge() from googletrans import Translator - +from googletrans.models import Translated translator = Translator() -from dialogflow_task_executive.msg import (DialogTextAction, - DialogTextActionResult, - DialogTextGoal) -from google_chat_ros.msg import (Card, Image, MessageEvent, Section, - SendMessageAction, SendMessageGoal, - WidgetMarkup) -from jsk_recognition_msgs.msg import (ClassificationTaskAction, - ClassificationTaskGoal) from mongodb_store.util import deserialise_message -from mongodb_store_msgs.msg import StringPair, StringPairList + +from google_chat_ros.msg import Card, Section, WidgetMarkup, Image +from google_chat_ros.msg import MessageEvent, SendMessageAction, SendMessageGoal + +from mongodb_store_msgs.msg import StringPairList, StringPair from mongodb_store_msgs.srv import MongoQueryMsg, MongoQueryMsgRequest + from ros_google_cloud_language.msg import AnalyzeTextAction, AnalyzeTextGoal +from dialogflow_task_executive.msg import DialogTextAction, DialogTextGoal, DialogTextActionResult -class MessageListener(object): +from jsk_recognition_msgs.msg import ClassificationTaskAction, ClassificationTaskGoal +from jsk_recognition_msgs.msg import VQATaskAction, VQATaskGoal - def __init__(self): - rospy.loginfo("wait for '/google_chat_ros/send'") - self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', - SendMessageAction) - self.chat_ros_ac.wait_for_server() - #self.pub = 
rospy.Publisher('/google_chat_ros/send/goal', SendMessageActionGoal, queue_size=1) - - rospy.loginfo("wait for '/message_store/query_messages'") - rospy.wait_for_service('/message_store/query_messages') - self.query = rospy.ServiceProxy('/message_store/query_messages', - MongoQueryMsg) - - rospy.loginfo("wait for '/classification/inference_server'") - self.classification_ac = actionlib.SimpleActionClient( - '/classification/inference_server', ClassificationTaskAction) - self.classification_ac.wait_for_server() - - ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py - # rospy.loginfo("wait for '/dialogflow_client/text_action'") - # self.dialogflow_ac = actionlib.SimpleActionClient('/dialogflow_client/text_action' , DialogTextAction) - # self.dialogflow_ac.wait_for_server() - - rospy.loginfo("wait for '/analyze_text/text'") - self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text', - AnalyzeTextAction) - self.analyze_text_ac.wait_for_server() - - # rospy.loginfo("subscribe '/google_chat_ros/message_activity'") - # self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) - rospy.loginfo("subscribe '/dialogflow_client/text_action/result'") - self.sub = rospy.Subscriber('/dialogflow_client/text_action/result', - DialogTextActionResult, self.cb) - - rospy.loginfo("all done, ready") - - def make_reply(self, query): - rospy.logwarn("Run make_reply({})".format(query)) - # look for images - try: - # get chat message - results, chat_msgs = self.query_chat(query, datetime.datetime.now(JST)) - if len(results) == 0 and len(chat_msgs.metas) > 0: - meta = json.loads(chat_msgs.metas[-1].pairs[0].second) - results, chat_msgs = self.query_chat( - query, - datetime.datetime.fromtimestamp(meta['timestamp'] // 1000000000, - JST)) - # sort based on similarity with 'query' - chat_msgs_sorted = sorted(results, - key=lambda x: x['similarity'], - reverse=True) - - if len(chat_msgs_sorted) == 0: - 
rospy.logwarn("no chat message was found") - return - else: - # query images that was taken when chat_msgs are stored - msg = chat_msgs_sorted[0]['msg'] - meta = chat_msgs_sorted[0]['meta'] - text = chat_msgs_sorted[0]['message'] - timestamp = chat_msgs_sorted[0]['timestamp'] - #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second) - # text = msg.message.argument_text or msg.message.text - # timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - rospy.logwarn( - "Found message '{}' at {}, corresponds to query {}".format( - text, timestamp.strftime('%Y-%m-%d %H:%M:%S'), query)) - - start_time = timestamp - datetime.timedelta(minutes=30) - end_time = timestamp + datetime.timedelta(minutes=30) - results = self.query_images_and_classify(query=query, - start_time=start_time, - end_time=end_time) - - end_time = results[-1]['timestamp'] - # sort - results = sorted(results, key=lambda x: x['similarities'], reverse=True) - rospy.loginfo("Probabilities of all images {}".format( - list(map(lambda x: (x['label'], x['similarities']), results)))) - best_result = results[0] - - # if probability is too low, try again - while len(results) > 0 and results[0]['similarities'] < 0.25: - start_time = end_time - datetime.timedelta(hours=24) - timestamp = datetime.datetime.now(JST) - results = self.query_images_and_classify(query=query, - start_time=start_time, - end_time=end_time, - limit=300) - if len(results) > 0: - end_time = results[-1]['timestamp'] - # sort - results = sorted(results, - key=lambda x: x['similarities'], - reverse=True) - #rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results)))) - if len(results) > 0 and results[0]['similarities'] > best_result[ - 'similarities']: - best_result = results[0] +from openai_ros.srv import Completion - rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format( - best_result['label'], best_result['similarities'], - 
best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'))) - filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) - image = bridge.compressed_imgmsg_to_cv2(best_result['image']) - cv2.putText( - image, "{} ({:.2f}) {}".format( - best_result['label'], best_result['similarities'], - best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), (10, 20), - cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 8, 1) - cv2.putText( - image, "{} ({:.2f}) {}".format( - best_result['label'], best_result['similarities'], - best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), (10, 20), - cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 0), 2, 1) - cv2.imwrite(filename, image) - rospy.logwarn("save images to {}".format(filename)) - - # pubish as card - self.publish_google_chat_card( - translator.translate('We saw ' + query, dest="ja").text, filename) - - except Exception as e: - rospy.logerr("Query failed {}".format(e)) - - def query_chat(self, query, end_time, limit=30): - rospy.logwarn("Query chat until {}".format(end_time)) - meta_query = {'inserted_at': {"$lt": end_time}} - meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, - json.dumps(meta_query, - default=json_util.default)),) - chat_msgs = self.query( - database='jsk_robot_lifelog', - collection='strelka', - # type = 'google_chat_ros/MessageEvent', - type='dialogflow_task_executive/DialogTextActionResult', - single=False, - limit=limit, - meta_query=StringPairList(meta_tuple), - sort_query=StringPairList([StringPair('_meta.inserted_at', '-1')])) - - # show chats - results = [] - for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): - msg = deserialise_message(msg) - meta = json.loads(meta.pairs[0].second) - timestamp = datetime.datetime.fromtimestamp( - meta['timestamp'] // 1000000000, JST) - # message = msg.message.argument_text or msg.message.text - message = msg.result.response.query - result = { - 'message': message, - 'timestamp': timestamp, - 'similarity': difflib.SequenceMatcher(None, query, 
message).ratio(), - 'msg': msg, - 'meta': meta - } - if msg.result.response.action in ['make_reply', 'input.unknown']: - rospy.logwarn( - "Found chat messages {} at {} but skipping (action:{})".format( - result['message'], - result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), - msg.result.response.action)) - else: - results.append(result) - rospy.logwarn("Found chat messages {} at {} ({}:{:.2f})".format( - result['message'], - result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, - result['similarity'])) - - return results, chat_msgs - - def query_images_and_classify(self, query, start_time, end_time, limit=30): - rospy.logwarn("Query images from {} to {}".format(start_time, end_time)) - meta_query = { - 'input_topic': '/spot/camera/hand_color/image/compressed/throttled', - 'inserted_at': { - "$gt": start_time, - "$lt": end_time - } - } - meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, - json.dumps(meta_query, - default=json_util.default)),) - msgs = self.query(database='jsk_robot_lifelog', - collection='strelka', - type='sensor_msgs/CompressedImage', - single=False, - limit=limit, - meta_query=StringPairList(meta_tuple), - sort_query=StringPairList( - [StringPair('_meta.inserted_at', '-1')])) - - rospy.loginfo("Found {} images".format(len(msgs.messages))) - if len(msgs.messages) == 0: - rospy.logwarn("no images was found") - - # get contents of images - results = [] - for msg, meta in zip(msgs.messages, msgs.metas): - meta = json.loads(meta.pairs[0].second) - timestamp = datetime.datetime.fromtimestamp( - meta['timestamp'] // 1000000000, JST) - # rospy.logwarn("Found images at {}".format(timestamp)) - - goal = ClassificationTaskGoal() - goal.compressed_image = deserialise_message(msg) - goal.queries = [query] - self.classification_ac.send_goal(goal) - self.classification_ac.wait_for_result() - result = self.classification_ac.get_result() - idx = result.result.label_names.index(query) - #similarities = result.result.probabilities - similarities = 
result.result.label_proba - # rospy.logwarn(" ... {}".format(list(zip(result.result.label_names, map(lambda x: "{:.2f}".format(x), similarities))))) - rospy.logwarn("Found images at {} .. {}".format( - timestamp, - list( - zip(result.result.label_names, - map(lambda x: "{:.2f}".format(x), similarities))))) - results.append({ - 'label': result.result.label_names[idx], - 'probabilities': result.result.probabilities[idx], - 'similarities': result.result.label_proba[idx], - 'image': goal.compressed_image, - 'timestamp': timestamp - }) - - # we do not sorty by probabilites, becasue we also need oldest timestamp - return results - - def publish_google_chat_card(self, text, filename=None): - goal = SendMessageGoal() - goal.text = text - if filename: - goal.cards = [ - Card(sections=[ - Section(widgets=[WidgetMarkup(image=Image(localpath=filename))]) - ]) - ] - goal.space = 'spaces/AAAAoTwLBL0' - rospy.logwarn("send {} to {}".format(goal.text, goal.space)) - self.chat_ros_ac.send_goal_and_wait(goal, - execute_timeout=rospy.Duration(0.10)) - - def text_to_salience(self, text): - goal = AnalyzeTextGoal() - goal.text = text - self.analyze_text_ac.send_goal(goal) - self.analyze_text_ac.wait_for_result() - entity = self.analyze_text_ac.get_result() - if len(entity.entities) > 0: - return entity.entities[0].name - else: - return text - - def cb(self, msg): - if msg._type == 'google_chat_ros.msg/MessageEvent': - text = message.message.argument_text.lstrip( - ) or message.message.text.lstrip() - message = self.text_to_salience(text) - rospy.logwarn("Received chat message '{}'".format(text)) - - # ask dialogflow for intent - goal = DialogTextGoal() - goal.query = text - self.dialogflow_ac.send_goal(goal) - self.dialogflow_ac.wait_for_result() - result = self.dialogflow_ac.get_result() - elif msg._type == 'dialogflow_task_executive/DialogTextActionResult': - result = msg.result - else: - rospy.logerr("Unknown message type {}".format(msg._type)) - return - - try: - 
rospy.logwarn("received dialogflow query '{}'".format( - result.response.query)) - rospy.logwarn("received dialogflow action '{}'".format( - result.response.action)) - if result.response.action == 'input.unknown': - self.publish_google_chat_card("🤖") - elif result.response.action == 'make_reply': - self.make_reply( - self.text_to_salience( - translator.translate(result.response.query, dest="en").text)) - else: - self.publish_google_chat_card(result.response.response) - - except Exception as e: - rospy.logerr("Callback failed {}".format(e)) - self.publish_google_chat_card("💀 {}".format(e)) +class MessageListener(object): + def __init__(self): + rospy.loginfo("wait for '/google_chat_ros/send'") + self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction) + self.chat_ros_ac.wait_for_server() + #self.pub = rospy.Publisher('/google_chat_ros/send/goal', SendMessageActionGoal, queue_size=1) + + rospy.loginfo("wait for '/message_store/query_messages'") + rospy.wait_for_service('/message_store/query_messages') + self.query = rospy.ServiceProxy('/message_store/query_messages', MongoQueryMsg) + + rospy.loginfo("wait for '/classification/inference_server'") + self.classification_ac = actionlib.SimpleActionClient('/classification/inference_server' , ClassificationTaskAction) + self.classification_ac.wait_for_server() + + rospy.loginfo("wait for '/vqa/inference_server'") + self.vqa_ac = actionlib.SimpleActionClient('/vqa/inference_server' , VQATaskAction) + self.vqa_ac.wait_for_server() + + # https://github.com/k-okada/openai_ros + # this requres apt install python3.7 python3.7-venv + rospy.loginfo("wait for '/openai/get_response'") + rospy.wait_for_service('/openai/get_response') + self.completion = rospy.ServiceProxy('/openai/get_response', Completion) + + ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py + rospy.loginfo("wait for '/dialogflow_client/text_action'") + self.dialogflow_ac = 
actionlib.SimpleActionClient('/dialogflow_client/text_action' , DialogTextAction) + self.dialogflow_ac.wait_for_server() + + rospy.loginfo("wait for '/analyze_text/text'") + self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text' , AnalyzeTextAction) + self.analyze_text_ac.wait_for_server() + + # rospy.loginfo("subscribe '/google_chat_ros/message_activity'") + # self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) + rospy.loginfo("subscribe '/dialogflow_client/text_action/result'") + self.sub = rospy.Subscriber('/dialogflow_client/text_action/result', DialogTextActionResult, self.cb) + + rospy.loginfo("all done, ready") + + + def make_reply(self, message, lang="en"): + rospy.logwarn("Run make_reply({})".format(message)) + query = self.text_to_salience(message) + rospy.logwarn("query using salience word '{}'".format(query)) + # look for images + try: + # get chat message + timestamp = datetime.datetime.now(JST) + results, chat_msgs = self.query_dialogflow(query, timestamp, threshold=0.25) + retry = 0 + while retry < -1 and len(results) == 0 and len(chat_msgs.metas) > 0: + meta = json.loads(chat_msgs.metas[-1].pairs[0].second) + results, chat_msgs = self.query_dialogflow(query, datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)) + retry = retry + 1 + # sort based on similarity with 'query' + chat_msgs_sorted = sorted(results, key=lambda x: x['similarity'], reverse=True) + + if len(chat_msgs_sorted) == 0: + rospy.logwarn("no chat message was found") + else: + # query images that was taken when chat_msgs are stored + msg = chat_msgs_sorted[0]['msg'] + meta = chat_msgs_sorted[0]['meta'] + text = chat_msgs_sorted[0]['message'] + timestamp = chat_msgs_sorted[0]['timestamp'] + action = chat_msgs_sorted[0]['action'] + similarity = chat_msgs_sorted[0]['similarity'] + # query chat to get response + #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second) + # text = msg.message.argument_text or 
msg.message.text + # timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + rospy.loginfo("Found message '{}'({}) at {}, corresponds to query '{}' with {:2f}%".format(text, action, timestamp.strftime('%Y-%m-%d %H:%M:%S'), query, similarity)) + + start_time = timestamp-datetime.timedelta(minutes=300) + end_time = timestamp+datetime.timedelta(minutes=30) + results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time) + + end_time = results[-1]['timestamp'] + # sort + results = sorted(results, key=lambda x: x['similarities'], reverse=True) + rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results)))) + best_result = results[0] + + # if probability is too low, try again + while len(results) > 0 and results[0]['similarities'] < 0.25: + start_time = end_time-datetime.timedelta(hours=24) + timestamp = datetime.datetime.now(JST) + results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time, limit=300) + if len(results) > 0: + end_time = results[-1]['timestamp'] + # sort + results = sorted(results, key=lambda x: x['similarities'], reverse=True) + #rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results)))) + if len(results) > 0 and results[0]['similarities'] > best_result['similarities']: + best_result = results[0] + + rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'))) + + ## make prompt + goal = VQATaskGoal() + goal.compressed_image = best_result['image'] + + # unusual objects + if random.randint(0,1) == 1: + goal.questions = ['what unusual things can be seen?'] + reaction = 'and you saw ' + else: + goal.questions = ['what is the atmosphere of this place?'] + reaction = 'and the atmosphere of the scene was ' + + # get vqa result + 
self.vqa_ac.send_goal(goal) + self.vqa_ac.wait_for_result() + result = self.vqa_ac.get_result() + reaction += result.result.result[0].answer + if len(chat_msgs_sorted) > 0 and chat_msgs_sorted[0]['action'] and 'action' in chat_msgs_sorted[0]: + reaction += " and you felt " + chat_msgs_sorted[0]['action'] + + # make prompt + prompt = 'if you are a pet and someone tells you \"' + message + '\" when we went together, ' + \ + reaction + ' in your memory of that moment, what would you reply? '+ \ + 'Show only the reply in {lang}'.format(lang={'en': 'English', 'ja':'Japanese'}[lang]) + result = self.completion(prompt=prompt,temperature=0) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("result = {}".format(result)) + # pubish as card + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + self.write_image_with_annotation(filename, best_result, prompt) + self.publish_google_chat_card(result.text, filename) + + except Exception as e: + raise ValueError("Query failed {} {}".format(e, traceback.format_exc())) + + + def write_image_with_annotation(self, filename, best_result, prompt): + image = bridge.compressed_imgmsg_to_cv2(best_result['image']) + cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), + (10,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 8, 1) + cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), + (10,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,0), 2, 1) + string_width = 70 + for i in range(0, len(prompt), string_width): # https://stackoverflow.com/questions/13673060/split-string-into-strings-by-length + text = prompt[i:i+string_width] + cv2.putText(image, text, (10,43+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 4, 1) + cv2.putText(image, text, (10,43+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, 
(0,0,0), 1, 1) + cv2.imwrite(filename, image) + rospy.logwarn("save images to {}".format(filename)) + + + def query_dialogflow(self, query, end_time, limit=30, threshold=0.0): + rospy.logwarn("Query dialogflow until {}".format(end_time)) + meta_query= {'inserted_at': {"$lt": end_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) + chat_msgs = self.query(database = 'jsk_robot_lifelog', + collection = 'fetch1075', + # type = 'google_chat_ros/MessageEvent', + type = 'dialogflow_task_executive/DialogTextActionResult', + single = False, + limit = limit, + meta_query = StringPairList(meta_tuple), + sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) + + # show chats + results = [] + for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): + msg = deserialise_message(msg) + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + # message = msg.message.argument_text or msg.message.text + message = msg.result.response.query + message_translate = self.translate(message, dest="en").text + result = {'message': message, + 'message_translate': message_translate, + 'timestamp': timestamp, + 'similarity': difflib.SequenceMatcher(None, query, message_translate).ratio(), + 'action': msg.result.response.action, + 'msg': msg, + 'meta': meta} + if msg.result.response.action in ['make_reply', 'input.unknown']: + rospy.logwarn("Found dialogflow messages {} at {} but skipping (action:{})".format(result['message'], result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), msg.result.response.action)) + else: + rospy.logwarn("Found dialogflow messages {}({}) ({}) at {} ({}:{:.2f})".format(result['message'], result['message_translate'], msg.result.response.action, result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, result['similarity'])) + if ( result['similarity'] > threshold): + results.append(result) + else: + rospy.logwarn(" ... 
skipping (threshold: {:.2f})".format(threshold)) + + + return results, chat_msgs + + + def query_images_and_classify(self, query, start_time, end_time, limit=30): + rospy.logwarn("Query images from {} to {}".format(start_time, end_time)) + # meta_query= {'input_topic': '/spot/camera/hand_color/image/compressed/throttled', + # 'inserted_at': {"$gt": start_time, "$lt": end_time}} + meta_query= {'input_topic': '/head_camera/rgb/image_rect_color/compressed/throttled', + 'inserted_at': {"$gt": start_time, "$lt": end_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) + msgs = self.query(database = 'jsk_robot_lifelog', + collection = 'fetch1075', + type = 'sensor_msgs/CompressedImage', + single = False, + limit = limit, + meta_query = StringPairList(meta_tuple), + sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) + + rospy.loginfo("Found {} images".format(len(msgs.messages))) + if len(msgs.messages) == 0: + rospy.logwarn("no images was found") + + # get contents of images + results = [] + for msg, meta in zip(msgs.messages, msgs.metas): + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + # rospy.logwarn("Found images at {}".format(timestamp)) + + goal = ClassificationTaskGoal() + goal.compressed_image = deserialise_message(msg) + goal.queries = [query] + self.classification_ac.send_goal(goal) + self.classification_ac.wait_for_result() + result = self.classification_ac.get_result() + idx = result.result.label_names.index(query) + #similarities = result.result.probabilities + similarities = result.result.label_proba + # rospy.logwarn(" ... {}".format(list(zip(result.result.label_names, map(lambda x: "{:.2f}".format(x), similarities))))) + rospy.logwarn("Found images at {} .. 
{}".format(timestamp, list(zip(result.result.label_names, map(lambda x: "{:.4f}".format(x), similarities))))) + results.append({'label': result.result.label_names[idx], 'probabilities': result.result.probabilities[idx], 'similarities': result.result.label_proba[idx], 'image': goal.compressed_image, 'timestamp': timestamp}) + + # we do not sorty by probabilites, becasue we also need oldest timestamp + return results + + + def publish_google_chat_card(self, text, filename=None): + goal = SendMessageGoal() + goal.text = text + if filename: + goal.cards = [Card(sections=[Section(widgets=[WidgetMarkup(image=Image(localpath=filename))])])] + goal.space = 'spaces/AAAAoTwLBL0' + rospy.logwarn("send {} to {}".format(goal.text, goal.space)) + self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10)) + + def text_to_salience(self, text): + goal = AnalyzeTextGoal() + goal.text = text; + self.analyze_text_ac.send_goal(goal) + self.analyze_text_ac.wait_for_result() + entity = self.analyze_text_ac.get_result() + if len(entity.entities) > 0: + return entity.entities[0].name + else: + return text + + def translate(self, text, dest): + return Translated(text=text, dest=dest, src="en", origin="unknown", pronunciation="unknown") + global translator + loop = 3 + while loop > 0: + try: + ret = translator.translate(text, dest="en") + return ret + except Exception as e: + rospy.logwarn("Faile to translate {}".format(e)) + time.sleep(1) + translator = Translator() + loop = loop - 1 + return Translated(text=text, dest=dest) + + + def cb(self, msg): + if msg._type == 'google_chat_ros.msg/MessageEvent': + text = message.message.argument_text.lstrip() or message.message.text.lstrip() + rospy.logwarn("Received chat message '{}'".format(text)) + + # ask dialogflow for intent + goal = DialogTextGoal() + goal.query = text + self.dialogflow_ac.send_goal(goal) + self.dialogflow_ac.wait_for_result() + result = self.dialogflow_ac.get_result() + elif msg._type == 
'dialogflow_task_executive/DialogTextActionResult': + result = msg.result + else: + rospy.logerr("Unknown message type {}".format(msg._type)) + return + + try: + rospy.logwarn("received dialogflow query '{}'".format(result.response.query)) + rospy.logwarn("received dialogflow action '{}'".format(result.response.action)) + print(result.response) + if result.response.action == 'input.unknown': + self.publish_google_chat_card("🤖") + elif result.response.action == 'make_reply': + translated = self.translate(result.response.query, dest="en") + self.make_reply(translated.text, translated.src) + else: + self.publish_google_chat_card(result.response.response) + + except Exception as e: + rospy.logerr("Callback failed {} {}".format(e, traceback.format_exc())) + self.publish_google_chat_card("💀 {}".format(e)) if __name__ == '__main__': - rospy.init_node('test', anonymous=True) - ml = MessageListener() - #ml.cb2(0) - #ml.cb2('chair') - rospy.spin() + rospy.init_node('test', anonymous=True) + ml = MessageListener() + #ml.cb2(0) + #ml.cb2('chair') + rospy.spin() From 00989ff2af5f2a143158125f0e18d32810fd1e3a Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 26 Apr 2023 13:44:42 +0900 Subject: [PATCH 07/60] clean up code, respont to original space etc --- database_talker/scripts/hoge.py | 72 +++++++++++++++++---------------- 1 file changed, 37 insertions(+), 35 deletions(-) diff --git a/database_talker/scripts/hoge.py b/database_talker/scripts/hoge.py index c6aead2542..a849887f4c 100644 --- a/database_talker/scripts/hoge.py +++ b/database_talker/scripts/hoge.py @@ -50,6 +50,9 @@ class MessageListener(object): def __init__(self): + self.robot_name = rospy.get_param('robot/name') + rospy.loginfo("using '{}' database".format(self.robot_name)) + rospy.loginfo("wait for '/google_chat_ros/send'") self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction) self.chat_ros_ac.wait_for_server() @@ -58,7 +61,7 @@ def __init__(self): rospy.loginfo("wait for 
'/message_store/query_messages'") rospy.wait_for_service('/message_store/query_messages') self.query = rospy.ServiceProxy('/message_store/query_messages', MongoQueryMsg) - + rospy.loginfo("wait for '/classification/inference_server'") self.classification_ac = actionlib.SimpleActionClient('/classification/inference_server' , ClassificationTaskAction) self.classification_ac.wait_for_server() @@ -82,10 +85,8 @@ def __init__(self): self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text' , AnalyzeTextAction) self.analyze_text_ac.wait_for_server() - # rospy.loginfo("subscribe '/google_chat_ros/message_activity'") - # self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) - rospy.loginfo("subscribe '/dialogflow_client/text_action/result'") - self.sub = rospy.Subscriber('/dialogflow_client/text_action/result', DialogTextActionResult, self.cb) + rospy.loginfo("subscribe '/google_chat_ros/message_activity'") + self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) rospy.loginfo("all done, ready") @@ -100,13 +101,13 @@ def make_reply(self, message, lang="en"): timestamp = datetime.datetime.now(JST) results, chat_msgs = self.query_dialogflow(query, timestamp, threshold=0.25) retry = 0 - while retry < -1 and len(results) == 0 and len(chat_msgs.metas) > 0: + while retry < 3 and len(results) == 0 and len(chat_msgs.metas) > 0: meta = json.loads(chat_msgs.metas[-1].pairs[0].second) results, chat_msgs = self.query_dialogflow(query, datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)) retry = retry + 1 # sort based on similarity with 'query' chat_msgs_sorted = sorted(results, key=lambda x: x['similarity'], reverse=True) - + if len(chat_msgs_sorted) == 0: rospy.logwarn("no chat message was found") else: @@ -123,18 +124,21 @@ def make_reply(self, message, lang="en"): # timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) rospy.loginfo("Found message '{}'({}) at 
{}, corresponds to query '{}' with {:2f}%".format(text, action, timestamp.strftime('%Y-%m-%d %H:%M:%S'), query, similarity)) - start_time = timestamp-datetime.timedelta(minutes=300) + # query images when chat was received (+- 30 min) + start_time = timestamp-datetime.timedelta(minutes=30) end_time = timestamp+datetime.timedelta(minutes=30) results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time) - end_time = results[-1]['timestamp'] + if len(results) > 0: + end_time = results[-1]['timestamp'] + # sort results = sorted(results, key=lambda x: x['similarities'], reverse=True) rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results)))) best_result = results[0] - # if probability is too low, try again while len(results) > 0 and results[0]['similarities'] < 0.25: + start_time = end_time-datetime.timedelta(hours=24) timestamp = datetime.datetime.now(JST) results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time, limit=300) @@ -178,8 +182,8 @@ def make_reply(self, message, lang="en"): # pubish as card filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) self.write_image_with_annotation(filename, best_result, prompt) - self.publish_google_chat_card(result.text, filename) - + return {'text': result.text, 'filename': filename} + except Exception as e: raise ValueError("Query failed {} {}".format(e, traceback.format_exc())) @@ -204,12 +208,12 @@ def query_dialogflow(self, query, end_time, limit=30, threshold=0.0): meta_query= {'inserted_at': {"$lt": end_time}} meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) chat_msgs = self.query(database = 'jsk_robot_lifelog', - collection = 'fetch1075', + collection = self.robot_name, # type = 'google_chat_ros/MessageEvent', type = 'dialogflow_task_executive/DialogTextActionResult', single = False, limit = limit, - meta_query = 
StringPairList(meta_tuple), + meta_query = StringPairList(meta_tuple), sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) # show chats @@ -236,27 +240,25 @@ def query_dialogflow(self, query, end_time, limit=30, threshold=0.0): results.append(result) else: rospy.logwarn(" ... skipping (threshold: {:.2f})".format(threshold)) - + return results, chat_msgs def query_images_and_classify(self, query, start_time, end_time, limit=30): rospy.logwarn("Query images from {} to {}".format(start_time, end_time)) - # meta_query= {'input_topic': '/spot/camera/hand_color/image/compressed/throttled', - # 'inserted_at': {"$gt": start_time, "$lt": end_time}} - meta_query= {'input_topic': '/head_camera/rgb/image_rect_color/compressed/throttled', - 'inserted_at': {"$gt": start_time, "$lt": end_time}} + meta_query= {#'input_topic': '/spot/camera/hand_color/image/compressed/throttled', + 'inserted_at': {"$gt": start_time, "$lt": end_time}} meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) msgs = self.query(database = 'jsk_robot_lifelog', - collection = 'fetch1075', + collection = self.robot_name, type = 'sensor_msgs/CompressedImage', single = False, limit = limit, meta_query = StringPairList(meta_tuple), sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) - rospy.loginfo("Found {} images".format(len(msgs.messages))) + rospy.loginfo("Found {} images".format(len(msgs.messages))) if len(msgs.messages) == 0: rospy.logwarn("no images was found") @@ -284,12 +286,12 @@ def query_images_and_classify(self, query, start_time, end_time, limit=30): return results - def publish_google_chat_card(self, text, filename=None): + def publish_google_chat_card(self, text, space, filename=None): goal = SendMessageGoal() goal.text = text if filename: goal.cards = [Card(sections=[Section(widgets=[WidgetMarkup(image=Image(localpath=filename))])])] - goal.space = 'spaces/AAAAoTwLBL0' + goal.space = space 
rospy.logwarn("send {} to {}".format(goal.text, goal.space)) self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10)) @@ -305,7 +307,6 @@ def text_to_salience(self, text): return text def translate(self, text, dest): - return Translated(text=text, dest=dest, src="en", origin="unknown", pronunciation="unknown") global translator loop = 3 while loop > 0: @@ -318,11 +319,13 @@ def translate(self, text, dest): translator = Translator() loop = loop - 1 return Translated(text=text, dest=dest) - - + + def cb(self, msg): - if msg._type == 'google_chat_ros.msg/MessageEvent': - text = message.message.argument_text.lstrip() or message.message.text.lstrip() + space = 'spaces/AAAAoTwLBL0' ## default space JskRobotBot + if msg._type == 'google_chat_ros/MessageEvent': + text = msg.message.argument_text.lstrip() or msg.message.text.lstrip() + space = msg.space.name rospy.logwarn("Received chat message '{}'".format(text)) # ask dialogflow for intent @@ -342,20 +345,19 @@ def cb(self, msg): rospy.logwarn("received dialogflow action '{}'".format(result.response.action)) print(result.response) if result.response.action == 'input.unknown': - self.publish_google_chat_card("🤖") + self.publish_google_chat_card("🤖", space) elif result.response.action == 'make_reply': translated = self.translate(result.response.query, dest="en") - self.make_reply(translated.text, translated.src) + ret = self.make_reply(translated.text, translated.src) + self.publish_google_chat_card(ret['text'], space, ret['filename']) else: - self.publish_google_chat_card(result.response.response) - + self.publish_google_chat_card(result.response.response, space) + except Exception as e: rospy.logerr("Callback failed {} {}".format(e, traceback.format_exc())) - self.publish_google_chat_card("💀 {}".format(e)) + self.publish_google_chat_card("💀 {}".format(e), space) if __name__ == '__main__': rospy.init_node('test', anonymous=True) ml = MessageListener() - #ml.cb2(0) - #ml.cb2('chair') rospy.spin() From 
421a5659dc7b0ea71be36c3a39a0d321d747edda Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 1 Jun 2023 15:04:19 +0900 Subject: [PATCH 08/60] use date-period time, optimize translation process --- database_talker/scripts/hoge.py | 140 +++++++++++++++++++++----------- 1 file changed, 94 insertions(+), 46 deletions(-) diff --git a/database_talker/scripts/hoge.py b/database_talker/scripts/hoge.py index a849887f4c..e463ef5a1e 100644 --- a/database_talker/scripts/hoge.py +++ b/database_talker/scripts/hoge.py @@ -13,9 +13,11 @@ import json import os import random +import re import rospkg import shutil import sys +import yaml import tempfile import time import traceback @@ -91,20 +93,20 @@ def __init__(self): rospy.loginfo("all done, ready") - def make_reply(self, message, lang="en"): - rospy.logwarn("Run make_reply({})".format(message)) + def make_reply(self, message, lang="en", startdate=datetime.datetime.now(JST)-datetime.timedelta(hours=24), duration=datetime.timedelta(hours=24) ): + enddate = startdate+duration + rospy.logwarn("Run make_reply({} from {} to {})".format(message, startdate, enddate)) query = self.text_to_salience(message) rospy.logwarn("query using salience word '{}'".format(query)) # look for images try: # get chat message - timestamp = datetime.datetime.now(JST) - results, chat_msgs = self.query_dialogflow(query, timestamp, threshold=0.25) - retry = 0 - while retry < 3 and len(results) == 0 and len(chat_msgs.metas) > 0: - meta = json.loads(chat_msgs.metas[-1].pairs[0].second) - results, chat_msgs = self.query_dialogflow(query, datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)) - retry = retry + 1 + results, chat_msgs = self.query_dialogflow(query, startdate, enddate, threshold=0.25) + # retry = 0 + # while retry < 3 and len(results) == 0 and len(chat_msgs.metas) > 0: + # meta = json.loads(chat_msgs.metas[-1].pairs[0].second) + # results, chat_msgs = self.query_dialogflow(query, 
datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)) + # retry = retry + 1 # sort based on similarity with 'query' chat_msgs_sorted = sorted(results, key=lambda x: x['similarity'], reverse=True) @@ -115,27 +117,32 @@ def make_reply(self, message, lang="en"): msg = chat_msgs_sorted[0]['msg'] meta = chat_msgs_sorted[0]['meta'] text = chat_msgs_sorted[0]['message'] - timestamp = chat_msgs_sorted[0]['timestamp'] + startdate = chat_msgs_sorted[0]['timestamp'] action = chat_msgs_sorted[0]['action'] similarity = chat_msgs_sorted[0]['similarity'] # query chat to get response #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second) # text = msg.message.argument_text or msg.message.text - # timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - rospy.loginfo("Found message '{}'({}) at {}, corresponds to query '{}' with {:2f}%".format(text, action, timestamp.strftime('%Y-%m-%d %H:%M:%S'), query, similarity)) + # startdate = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + rospy.loginfo("Found message '{}'({}) at {}, corresponds to query '{}' with {:2f}%".format(text, action, startdate.strftime('%Y-%m-%d %H:%M:%S'), query, similarity)) - # query images when chat was received (+- 30 min) - start_time = timestamp-datetime.timedelta(minutes=30) - end_time = timestamp+datetime.timedelta(minutes=30) + # query images when chat was received + start_time = startdate # startdate is updated with found chat space + end_time = enddate # enddate is not modified within this function, it is given from chat results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time) - if len(results) > 0: - end_time = results[-1]['timestamp'] + # no images found + if len(results) == 0: + return {'text': '記憶がありません🤯'} + + end_time = results[-1]['timestamp'] # sort results = sorted(results, key=lambda x: x['similarities'], reverse=True) - rospy.loginfo("Probabilities of all images {}".format(list(map(lambda 
x: (x['label'], x['similarities']), results)))) + rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), x['similarities']), results)))) best_result = results[0] + + ''' # if probability is too low, try again while len(results) > 0 and results[0]['similarities'] < 0.25: @@ -151,32 +158,28 @@ def make_reply(self, message, lang="en"): best_result = results[0] rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'))) + ''' ## make prompt - goal = VQATaskGoal() - goal.compressed_image = best_result['image'] - - # unusual objects - if random.randint(0,1) == 1: - goal.questions = ['what unusual things can be seen?'] - reaction = 'and you saw ' - else: - goal.questions = ['what is the atmosphere of this place?'] - reaction = 'and the atmosphere of the scene was ' - - # get vqa result - self.vqa_ac.send_goal(goal) - self.vqa_ac.wait_for_result() - result = self.vqa_ac.get_result() - reaction += result.result.result[0].answer + reaction = self.describe_image_scene(best_result['image']) if len(chat_msgs_sorted) > 0 and chat_msgs_sorted[0]['action'] and 'action' in chat_msgs_sorted[0]: reaction += " and you felt " + chat_msgs_sorted[0]['action'] + rospy.loginfo("reaction = {}".format(reaction)) # make prompt prompt = 'if you are a pet and someone tells you \"' + message + '\" when we went together, ' + \ - reaction + ' in your memory of that moment, what would you reply? '+ \ + 'and ' + reaction + ' in your memory of that moment, what would you reply? 
'+ \ 'Show only the reply in {lang}'.format(lang={'en': 'English', 'ja':'Japanese'}[lang]) - result = self.completion(prompt=prompt,temperature=0) + loop = 0 + result = None + while loop < 3 and result is None: + try: + result = self.completion(prompt=prompt,temperature=0) + except rospy.ServiceException as e: + rospy.logerr("Service call failed: %s"%e) + result = None + loop += 1 + result.text = result.text.lstrip() rospy.loginfo("prompt = {}".format(prompt)) rospy.loginfo("result = {}".format(result)) # pubish as card @@ -203,19 +206,27 @@ def write_image_with_annotation(self, filename, best_result, prompt): rospy.logwarn("save images to {}".format(filename)) - def query_dialogflow(self, query, end_time, limit=30, threshold=0.0): - rospy.logwarn("Query dialogflow until {}".format(end_time)) - meta_query= {'inserted_at': {"$lt": end_time}} + def query_dialogflow(self, query, start_time, end_time, limit=30, threshold=0.0): + rospy.logwarn("Query dialogflow from {} until {}".format(start_time, end_time)) + meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}} meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) chat_msgs = self.query(database = 'jsk_robot_lifelog', collection = self.robot_name, # type = 'google_chat_ros/MessageEvent', type = 'dialogflow_task_executive/DialogTextActionResult', single = False, - limit = limit, + # limit = limit, meta_query = StringPairList(meta_tuple), sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) + # optimization... 
send translate once + messages = '' + for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): + msg = deserialise_message(msg) + message = msg.result.response.query.replace('\n','') + messages += message + '\n' + messages = self.translate(messages, dest="en").text.split('\n') + # show chats results = [] for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): @@ -224,7 +235,8 @@ def query_dialogflow(self, query, end_time, limit=30, threshold=0.0): timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) # message = msg.message.argument_text or msg.message.text message = msg.result.response.query - message_translate = self.translate(message, dest="en").text + #message_translate = self.translate(message, dest="en").text + message_translate = messages.pop(0).strip() result = {'message': message, 'message_translate': message_translate, 'timestamp': timestamp, @@ -233,9 +245,9 @@ def query_dialogflow(self, query, end_time, limit=30, threshold=0.0): 'msg': msg, 'meta': meta} if msg.result.response.action in ['make_reply', 'input.unknown']: - rospy.logwarn("Found dialogflow messages {} at {} but skipping (action:{})".format(result['message'], result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), msg.result.response.action)) + rospy.logwarn("Found dialogflow messages {}({}) at {} but skipping (action:{})".format(result['message'], result['message_translate'], result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), msg.result.response.action)) else: - rospy.logwarn("Found dialogflow messages {}({}) ({}) at {} ({}:{:.2f})".format(result['message'], result['message_translate'], msg.result.response.action, result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, result['similarity'])) + rospy.loginfo("Found dialogflow messages {}({}) ({}) at {} ({}:{:.2f})".format(result['message'], result['message_translate'], msg.result.response.action, result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, result['similarity'])) if ( result['similarity'] > 
threshold): results.append(result) else: @@ -245,7 +257,7 @@ def query_dialogflow(self, query, end_time, limit=30, threshold=0.0): return results, chat_msgs - def query_images_and_classify(self, query, start_time, end_time, limit=30): + def query_images_and_classify(self, query, start_time, end_time, limit=10): rospy.logwarn("Query images from {} to {}".format(start_time, end_time)) meta_query= {#'input_topic': '/spot/camera/hand_color/image/compressed/throttled', 'inserted_at': {"$gt": start_time, "$lt": end_time}} @@ -285,6 +297,24 @@ def query_images_and_classify(self, query, start_time, end_time, limit=30): # we do not sorty by probabilites, becasue we also need oldest timestamp return results + def describe_image_scene(self, image): + goal = VQATaskGoal() + goal.compressed_image = image + + # unusual objects + if random.randint(0,1) == 1: + goal.questions = ['what unusual things can be seen?'] + reaction = 'you saw ' + else: + goal.questions = ['what is the atmosphere of this place?'] + reaction = 'the atmosphere of the scene was ' + + # get vqa result + self.vqa_ac.send_goal(goal) + self.vqa_ac.wait_for_result() + result = self.vqa_ac.get_result() + reaction += result.result.result[0].answer + return reaction def publish_google_chat_card(self, text, space, filename=None): goal = SendMessageGoal() @@ -347,9 +377,27 @@ def cb(self, msg): if result.response.action == 'input.unknown': self.publish_google_chat_card("🤖", space) elif result.response.action == 'make_reply': + self.publish_google_chat_card("・・・", space) + + parameters = yaml.safe_load(result.response.parameters) + startdate=datetime.datetime.now(JST)-datetime.timedelta(hours=24) + duration=datetime.timedelta(hours=24) + if parameters['date']: + startdate = datetime.datetime.strptime(re.sub('\+(\d+):(\d+)$', '+\\1\\2',parameters['date']), "%Y-%m-%dT%H:%M:%S%z") + duration = datetime.timedelta(hours=24) + if parameters['date-period']: + startdate = datetime.datetime.strptime(re.sub('\+(\d+):(\d+)$', 
'+\\1\\2',parameters['date-period']['startDate']), "%Y-%m-%dT%H:%M:%S%z") + duration = datetime.datetime.strptime(re.sub('\+(\d+):(\d+)$', '+\\1\\2',parameters['date-period']['endDate']), "%Y-%m-%dT%H:%M:%S%z") - startdate + print(startdate) + print(duration) translated = self.translate(result.response.query, dest="en") - ret = self.make_reply(translated.text, translated.src) - self.publish_google_chat_card(ret['text'], space, ret['filename']) + ret = self.make_reply(translated.text, translated.src, startdate=startdate, duration=duration) + if 'filename' in ret: + # upload text first, then upload images + self.publish_google_chat_card(ret['text'], space) + self.publish_google_chat_card('', space, ret['filename']) + else: + self.publish_google_chat_card(ret['text'], space) else: self.publish_google_chat_card(result.response.response, space) From 862e299f06661c33b4d9623b8b744c946a487574 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Tue, 21 Nov 2023 14:05:20 +0900 Subject: [PATCH 09/60] add make aibo diary --- database_talker/scripts/make_aibo_diary.py | 718 +++++++++++++++++++++ 1 file changed, 718 insertions(+) create mode 100755 database_talker/scripts/make_aibo_diary.py diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py new file mode 100755 index 0000000000..295e29f967 --- /dev/null +++ b/database_talker/scripts/make_aibo_diary.py @@ -0,0 +1,718 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import rospy +import logging + +import actionlib +from bson import json_util +# import copy +import cv2 +import datetime +# import difflib +import json +import os +# import random +import pickle +import re +import rospkg +# import shutil +# import sys +# import yaml +import tempfile +# import time +import traceback + +from dateutil import tz +JST = tz.gettz('Asia/Tokyo') + +from cv_bridge import CvBridge +bridge = CvBridge() + +# from googletrans import Translator +# from googletrans.models import Translated +# 
translator = Translator() + +from mongodb_store.util import deserialise_message + +from google_chat_ros.msg import Card, Section, WidgetMarkup, Image +from google_chat_ros.msg import MessageEvent, SendMessageAction, SendMessageGoal + +from mongodb_store_msgs.msg import StringPairList, StringPair +from mongodb_store_msgs.srv import MongoQueryMsg, MongoQueryMsgRequest, MongoQueryMsgResponse + +# from ros_google_cloud_language.msg import AnalyzeTextAction, AnalyzeTextGoal + +# from dialogflow_task_executive.msg import DialogTextAction, DialogTextGoal, DialogTextActionResult + +# from jsk_recognition_msgs.msg import ClassificationTaskAction, ClassificationTaskGoal +# from jsk_recognition_msgs.msg import VQATaskAction, VQATaskGoal + +from openai_ros.srv import Completion, CompletionResponse + +class MessageListener(object): + + def __init__(self): + #self.pickle_file = tempfile.NamedTemporaryFile(suffix='.pickle') + self.pickle_file = "/tmp/activities.pickle" + self.robot_name = rospy.get_param('robot/name') + rospy.loginfo("using '{}' database".format(self.robot_name)) + + rospy.loginfo("wait for '/google_chat_ros/send'") + self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction) + self.chat_ros_ac.wait_for_server() + + rospy.loginfo("wait for '/message_store/query_messages'") + rospy.wait_for_service('/message_store/query_messages') + self.query = rospy.ServiceProxy('/message_store/query_messages', MongoQueryMsg) + + # rospy.loginfo("wait for '/classification/inference_server'") + # self.classification_ac = actionlib.SimpleActionClient('/classification/inference_server' , ClassificationTaskAction) + # self.classification_ac.wait_for_server() + + # rospy.loginfo("wait for '/vqa/inference_server'") + # self.vqa_ac = actionlib.SimpleActionClient('/vqa/inference_server' , VQATaskAction) + # self.vqa_ac.wait_for_server() + + # # https://github.com/k-okada/openai_ros + # # this requres apt install python3.7 python3.7-venv + 
rospy.loginfo("wait for '/openai/get_response'") + rospy.wait_for_service('/openai/get_response') + self.completion = rospy.ServiceProxy('/openai/get_response', Completion) + + # ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py + # rospy.loginfo("wait for '/dialogflow_client/text_action'") + # self.dialogflow_ac = actionlib.SimpleActionClient('/dialogflow_client/text_action' , DialogTextAction) + # self.dialogflow_ac.wait_for_server() + + # rospy.loginfo("wait for '/analyze_text/text'") + # self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text' , AnalyzeTextAction) + # self.analyze_text_ac.wait_for_server() + + rospy.loginfo("subscribe '/google_chat_ros/message_activity'") + self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) + + rospy.loginfo("all done, ready") + + + def query_multiple_types(self, types, meta_tuple): + msgs = MongoQueryMsgResponse() + for _type in types: + msg = self.query(database = 'jsk_robot_lifelog', + collection = self.robot_name, + type = _type, + single = False, + # limit = limit, + meta_query = StringPairList(meta_tuple), + sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) + msgs.messages.extend(msg.messages) + msgs.metas.extend(msg.metas) + return msgs + + def query_activities2(self, start_time, end_time): + rospy.logwarn("Query activities from {} until {}".format(start_time, end_time)) + meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) + mongo_msgs = self.query_multiple_types(['aibo_driver/StringStatus', 'aibo_driver/ObjectStatusArray'], + meta_tuple) + + activities_raw = [] + for msg, meta in zip(mongo_msgs.messages, mongo_msgs.metas): + msg = deserialise_message(msg) + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, 
JST) + #rospy.logdebug("{} {} {}".format(timestamp, meta['input_topic'], msg.status)) + if meta['stored_type'] == 'aibo_driver/StringStatus': + if msg.status in ['', 'none']: + continue + if 'body_touched' in meta['input_topic']: + activities_raw.append(msg.status+'_touched') + elif 'hungry' in meta['input_topic']: + activities_raw.append('energy_'+msg.status) + elif 'posture' in meta['input_topic']: + activities_raw.append('posture_'+msg.status) + elif 'sleepy' in meta['input_topic']: + activities_raw.append(msg.status) + elif meta['stored_type'] == 'aibo_driver/ObjectStatusArray': + pass + return activities_raw + + def query_activities(self, start_time, end_time): + rospy.logwarn("Query activities from {} until {}".format(start_time, end_time)) + meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) + mongo_msgs = self.query_multiple_types(['aibo_driver/StringStatus', 'aibo_driver/ObjectStatusArray', + 'jsk_recognition_msgs/VQATaskActionResult'], + meta_tuple) + + activities = [] + for msg, meta in zip(mongo_msgs.messages, mongo_msgs.metas): + msg = deserialise_message(msg) + meta = json.loads(meta.pairs[0].second) + activities.append((msg, meta)) + return activities + + def query_activities_days(self, days=7): + # if we found cache file + if (os.path.exists(self.pickle_file) and + (datetime.datetime.today() - datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file))).seconds < 1 * 60 * 60): # seconds -> hours + rospy.loginfo('Loading cached activities data {}'.format(datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file)))) + with open(self.pickle_file, 'rb') as f: + return pickle.load(f) + + activities = [] + today = datetime.date.today() + startdate = datetime.datetime(today.year, today.month, today.day, tzinfo=JST) + for days_before in range(days): + activities_raw = 
self.query_activities(startdate-datetime.timedelta(hours=days_before*24), + startdate-datetime.timedelta(hours=(days_before-1)*24)) + if len(activities_raw): + activities.append(activities_raw) + + # dump msgs + with open(self.pickle_file, 'wb') as f: + pickle.dump(activities, f) + f.flush() + + return activities + + def make_aibo_activities_raw(self, activities): + diary_activities_raw = [] ## (timestamp, event) + for activities in activities: + rospy.loginfo("Found {} activities".format(len(activities))) + activities_raw = [] + for msg, meta in activities: + state = None + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + # rospy.logwarn("{} {}".format(timestamp, meta['input_topic'])) + if meta['stored_type'] == 'aibo_driver/StringStatus': + if msg.status in ['', 'none']: + continue + if 'body_touched' in meta['input_topic']: + state = [msg.status+' touched'] + elif 'hungry' in meta['input_topic']: + state = ['energy is '+msg.status] + elif 'posture' in meta['input_topic']: + if msg.status in ['sleep']: + state = ['sleeping'] + elif msg.status in ['stand']: + state = ['standing'] + else: + state = [msg.status] + elif 'sleepy' in meta['input_topic']: + continue + #state = [msg.status] + elif meta['stored_type'] == 'aibo_driver/ObjectStatusArray': + # remove duplicates from list https://stackoverflow.com/questions/7961363/removing-duplicates-in-lists + state = list(set(['found ' + state.name for state in msg.status])) + else: + continue + # create activities_raw + for s in state: + activities_raw.append((timestamp, s)) + diary_activities_raw.append(activities_raw) + ## + return diary_activities_raw ## (timestamp, event) + + def _make_activity(self): + # merge a list of dicts into a single dict + # https://stackoverflow.com/a/3495395 + always_events = {} + for key in {k: v for d in diary_activities_freq for k, v in d.items()}.keys(): + n = 0 + for k in diary_activities_freq: + n += 1 if k.get(key) else 0 + always_events.update({key : 
n/float(len(diary_activities_freq))}) + + noseen_events = {} + for key in {k: v for d in diary_activities_freq for k, v in d.items()}.keys(): + n = 0 + for k in diary_activities_freq: + if k.get(key): break + n += 1 + noseen_events.update({key: n}) + + for event in diary_activities_freq[0]: + freq_0 = diary_activities_freq[0] + freq_1 = diary_activities_freq[1] + if (not (event in freq_1 and int(freq_0[event]/ratio) <= freq_1[event])): + prompt += "{} : +{}\n".format(event, + int(freq_0[event]/ratio) - (freq_1[event] if event in freq_1 else 0)) + + def make_image_activities_raw(self, activities): + image_activities = {} + for msg, meta in activities[0]: + if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + answer = msg.result.result.result[0].answer + if answer not in image_activities.keys(): + image_activities.update({answer : timestamp}) + # + print(image_activities) + prompt = "From the list below, please select the most memorable event by number.\n\n" + n = 0 + for answer, timestamp in image_activities.items(): + print("----------------------" + answer) + prompt += "{}: {} ({})\n".format(n, answer, timestamp) + n += 1 + + response = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response)) + n = re.search(r'(\d+)', response) + if n: + answer, timestamp = list(image_activities.items())[max(int(n.group(1)),len(image_activities)-1)] + results = self.query_images_and_classify(query = answer, + start_time = timestamp - datetime.timedelta(minutes=5), + end_time = timestamp + datetime.timedelta(minutes=5), + classify = False) + if len(results) > 0: + # pubish as card + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + self.write_image_with_annotation(filename, results[0], answer) + return {'text': answer, 'filename': filename} + + def make_activity(self, activities = None): + if 
not activities: + activities = self.query_activities_days() + diary_activities_raw = self.make_aibo_activities_raw(activities) ## (timestamp, event) + + # check today + diary_activities_freq = [] + for activities_raw in diary_activities_raw: + activities_raw_state = [x[1] for x in activities_raw] + activities_freq = {key: activities_raw_state.count(key) for key in set(activities_raw_state)} + if len(activities_raw) > 0: + # rospy.loginfo("activities raw : {}".format(activities_raw)) + rospy.loginfo("activities freq : {} ({})".format(activities_freq, len(activities_freq))) + diary_activities_freq.append(activities_freq) + + # activities_events[event_name] = {'duration', datetime.timedelta} + diary_activities_events = [] + for activities_raw in diary_activities_raw: + activities_events = {} + for activities in activities_raw: + timestamp = activities[0] + event = activities[1] + if event in activities_events: + time_since_last_seen = activities_events[event]['last_seen'] - timestamp + if time_since_last_seen.seconds/60 < 30: # min + activities_events[event]['tmp_duration'] += time_since_last_seen + else: + # 'duration' keeps maximum duration + # if activities_events[event]['tmp_duration'] > activities_events[event]['duration']: + # activities_events[event]['duration'] = activities_events[event]['tmp_duration'] + # 'duration' keeps accumulated duration + activities_events[event]['duration'] += activities_events[event]['tmp_duration'] + activities_events[event]['tmp_duration'] = datetime.timedelta() + activities_events[event]['last_seen'] = timestamp + else: + activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta()}}) + #print("{} {:24} {} {}".format(timestamp, event, activities_events[event]['duration'], activities_events[event]['tmp_duration'])) + diary_activities_events.append(activities_events) + + for activities_events in diary_activities_events: + print("--") + for event, duration in 
sorted(activities_events.items(), key=lambda x: x[1]['duration'], reverse=True): + print("{:24} : {:4.2f} min".format(event, duration['duration'].seconds/60)) + + prompt = "" + prompt += "\n 'action : duration'\n" + activities_events = diary_activities_events[0] # get todays activities + for event, duration in sorted(activities_events.items(), key=lambda x: x[1]['duration'], reverse=True): + if duration['duration'].seconds > 0: + prompt += "{} : {} min\n".format(event, int(duration['duration'].seconds/60)) + + # estimate frequence in 24h + prompt += "\n 'action : increased time time than yesterday'\n" + more_yesterday_action = False + for event in diary_activities_events[0].keys(): + if event in diary_activities_events[1]: + duration = diary_activities_events[0][event]['duration'] - diary_activities_events[1][event]['duration'] + if duration.days > 0 and duration.seconds > 0: + prompt += "{} : {} min\n".format(event, int(duration.seconds/60)) + more_yesterday_action = True + if not more_yesterday_action: + prompt += "none\n" + + # + prompt += "\n 'action : number of days passed since you last did it'\n" + long_time_action = False + for event in diary_activities_events[0].keys(): + n = 1 + for diary_activities_event in diary_activities_events[1:]: + if event not in diary_activities_event.keys() or diary_activities_event[event]['duration'].seconds < 1: + n += 1 + else: + break + if n >= 2: + prompt += "{} : {} days\n".format(event, n) + long_time_action = True + if not long_time_action: + prompt += "none\n" + + rospy.logdebug(prompt) + return prompt + + def make_diary(self, language="Japanese"): + activities = self.query_activities_days() + # + topic_of_day = None + filname = False + image_activity = self.make_image_activities_raw(activities) + if image_activity: + topic_of_day = image_activity['text'] + filename = image_activity['filename'] + # + prompt = "You are a baby robot. You were taken care of by people around you." 
+ if topic_of_day: + prompt = "Today, you are impressed by " + topic_of_day + "." + prompt += "The following data is a record of today's actions regarding what we always do, what we did more than yesterday, and what happened after a long time. Please write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary.\n\n" + + prompt += self.make_activity(activities) + + response = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response)) + + #prompt = "Please rewrite the following diary in {language}. Write as childlike as you can. Write a maximum 120 {language} charactors.\n\n".format(language = language) + response + prompt = "Please rewrite the following diary as childlike as you can. Write a maximum 120 {} charactors.\n\n".format(language) + response + response = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response)) + return {'text': response, 'filename': filename} + + def make_response(self, text, language="Japanese"): + if language=="Japanese": + text = self.openai_completion('Translate the following sentences to English "{}"'.format(text)) + prompt = "You are a baby robot. You were taken care of by people around you. The following data is a record of today's actions regarding what we always do, what we did more of yesterday, and What happened after a long time.\nIf your frined ask you as '{}', wow do you reply? Note, however, that you are a baby robot, so please make it a child-like response.\n\n".format(text) + self.make_activity() + + response = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response)) + + prompt = "Please rewrite the following response as childlike as you can. 
Write a maximum 120 {} charactors.\n\n".format(language) + response + response = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response)) + return {'text': response} + + def make_reply(self, message, lang="en", startdate=datetime.datetime.now(JST)-datetime.timedelta(hours=24), duration=datetime.timedelta(hours=24) ): + enddate = startdate+duration + rospy.logwarn("Run make_reply({} from {} to {})".format(message, startdate, enddate)) + query = self.text_to_salience(message) + rospy.logwarn("query using salience word '{}'".format(query)) + # look for images + try: + # get chat message + results, chat_msgs = self.query_dialogflow(query, startdate, enddate, threshold=0.25) + # retry = 0 + # while retry < 3 and len(results) == 0 and len(chat_msgs.metas) > 0: + # meta = json.loads(chat_msgs.metas[-1].pairs[0].second) + # results, chat_msgs = self.query_dialogflow(query, datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)) + # retry = retry + 1 + # sort based on similarity with 'query' + chat_msgs_sorted = sorted(results, key=lambda x: x['similarity'], reverse=True) + + if len(chat_msgs_sorted) == 0: + rospy.logwarn("no chat message was found") + else: + # query images that was taken when chat_msgs are stored + msg = chat_msgs_sorted[0]['msg'] + meta = chat_msgs_sorted[0]['meta'] + text = chat_msgs_sorted[0]['message'] + startdate = chat_msgs_sorted[0]['timestamp'] + action = chat_msgs_sorted[0]['action'] + similarity = chat_msgs_sorted[0]['similarity'] + # query chat to get response + #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second) + # text = msg.message.argument_text or msg.message.text + # startdate = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + rospy.loginfo("Found message '{}'({}) at {}, corresponds to query '{}' with {:2f}%".format(text, action, startdate.strftime('%Y-%m-%d %H:%M:%S'), query, similarity)) + + # query images when chat was 
received + start_time = startdate # startdate is updated with found chat space + end_time = enddate # enddate is not modified within this function, it is given from chat + results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time) + + # no images found + if len(results) == 0: + return {'text': '記憶がありません🤯'} + + end_time = results[-1]['timestamp'] + + # sort + results = sorted(results, key=lambda x: x['similarities'], reverse=True) + rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), x['similarities']), results)))) + best_result = results[0] + + ''' + # if probability is too low, try again + while len(results) > 0 and results[0]['similarities'] < 0.25: + + start_time = end_time-datetime.timedelta(hours=24) + timestamp = datetime.datetime.now(JST) + results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time, limit=300) + if len(results) > 0: + end_time = results[-1]['timestamp'] + # sort + results = sorted(results, key=lambda x: x['similarities'], reverse=True) + #rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results)))) + if len(results) > 0 and results[0]['similarities'] > best_result['similarities']: + best_result = results[0] + + rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'))) + ''' + + ## make prompt + reaction = self.describe_image_scene(best_result['image']) + if len(chat_msgs_sorted) > 0 and chat_msgs_sorted[0]['action'] and 'action' in chat_msgs_sorted[0]: + reaction += " and you felt " + chat_msgs_sorted[0]['action'] + rospy.loginfo("reaction = {}".format(reaction)) + + # make prompt + prompt = 'if you are a pet and someone tells you \"' + message + '\" when we went together, ' + \ + 'and ' + reaction + ' in your memory of that 
moment, what would you reply? '+ \ + 'Show only the reply in {lang}'.format(lang={'en': 'English', 'ja':'Japanese'}[lang]) + loop = 0 + result = None + while loop < 3 and result is None: + try: + result = self.completion(prompt=prompt,temperature=0) + except rospy.ServiceException as e: + rospy.logerr("Service call failed: %s"%e) + result = None + loop += 1 + result.text = result.text.lstrip().encode('utf8') + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("result = {}".format(result)) + # pubish as card + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + self.write_image_with_annotation(filename, best_result, prompt) + return {'text': result.text, 'filename': filename} + + except Exception as e: + raise ValueError("Query failed {} {}".format(e, traceback.format_exc())) + + + def openai_completion(self, prompt, temperature=0): + loop = 0 + result = None + while loop < 5 and result is None: + try: + result = self.completion(prompt=prompt,temperature=temperature) + except rospy.ServiceException as e: + rospy.logerr("Service call failed: %s"%e) + rospy.sleep(2) + result = None + loop += 1 + if result is None: + raise Exception('[ERROR] openni_completion failed to complete {}'.format(prompt)) + result.text = result.text.lstrip() + rospy.logdebug("prompt = {}".format(prompt)) + rospy.logdebug("result = {}".format(result)) + return result.text + + def write_image_with_annotation(self, filename, best_result, prompt): + image = bridge.compressed_imgmsg_to_cv2(best_result['image']) + if 'label' in best_result and 'similarities' in best_result: + cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), + (10,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 8, 1) + cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), + (10,20), 
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,0), 2, 1) + h, w, c = image.shape + string_width = int(w/10) + for i in range(0, len(prompt), string_width): # https://stackoverflow.com/questions/13673060/split-string-into-strings-by-length + text = prompt[i:i+string_width] + cv2.putText(image, text, (10,43+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 4, 1) + cv2.putText(image, text, (10,43+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,0), 1, 1) + cv2.imwrite(filename, image) + rospy.logwarn("save images to {}".format(filename)) + + + def query_dialogflow(self, query, start_time, end_time, limit=30, threshold=0.0): + rospy.logwarn("Query dialogflow from {} until {}".format(start_time, end_time)) + meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) + chat_msgs = self.query(database = 'jsk_robot_lifelog', + collection = self.robot_name, + # type = 'google_chat_ros/MessageEvent', + type = 'dialogflow_task_executive/DialogTextActionResult', + single = False, + # limit = limit, + meta_query = StringPairList(meta_tuple), + sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) + + # optimization... 
send translate once + messages = '' + for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): + msg = deserialise_message(msg) + message = msg.result.response.query.replace('\n','') + messages += message + '\n' + messages = self.translate(messages, dest="en").text.split('\n') + + # show chats + results = [] + for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): + msg = deserialise_message(msg) + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + # message = msg.message.argument_text or msg.message.text + message = msg.result.response.query + #message_translate = self.translate(message, dest="en").text + message_translate = messages.pop(0).strip() + result = {'message': message, + 'message_translate': message_translate, + 'timestamp': timestamp, + 'similarity': difflib.SequenceMatcher(None, query, message_translate).ratio(), + 'action': msg.result.response.action, + 'msg': msg, + 'meta': meta} + if msg.result.response.action in ['make_reply', 'input.unknown']: + rospy.logwarn("Found dialogflow messages {}({}) at {} but skipping (action:{})".format(result['message'], result['message_translate'], result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), msg.result.response.action)) + else: + rospy.loginfo("Found dialogflow messages {}({}) ({}) at {} ({}:{:.2f})".format(result['message'], result['message_translate'], msg.result.response.action, result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, result['similarity'])) + if ( result['similarity'] > threshold): + results.append(result) + else: + rospy.logwarn(" ... 
skipping (threshold: {:.2f})".format(threshold)) + + + return results, chat_msgs + + + def query_images_and_classify(self, query, start_time, end_time, limit=10, classify=True): + rospy.logwarn("Query images from {} to {}".format(start_time, end_time)) + meta_query= {#'input_topic': '/spot/camera/hand_color/image/compressed/throttled', + 'inserted_at': {"$gt": start_time, "$lt": end_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) + msgs = self.query(database = 'jsk_robot_lifelog', + collection = self.robot_name, + type = 'sensor_msgs/CompressedImage', + single = False, + limit = limit, + meta_query = StringPairList(meta_tuple), + sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) + + rospy.loginfo("Found {} images".format(len(msgs.messages))) + if len(msgs.messages) == 0: + rospy.logwarn("no images was found") + + # get contents of images + results = [] + for msg, meta in zip(msgs.messages, msgs.metas): + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + # rospy.logwarn("Found images at {}".format(timestamp)) + + result = {'query' : query, 'image' : deserialise_message(msg), 'timestamp': timestamp} + if classify: + goal = ClassificationTaskGoal() + goal.compressed_image = result['image'] + goal.queries = [query] + self.classification_ac.send_goal(goal) + self.classification_ac.wait_for_result() + result = self.classification_ac.get_result() + idx = result.result.label_names.index(query) + #similarities = result.result.probabilities + similarities = result.result.label_proba + # rospy.logwarn(" ... {}".format(list(zip(result.result.label_names, map(lambda x: "{:.2f}".format(x), similarities))))) + rospy.logwarn("Found images at {} .. 
{}".format(timestamp, list(zip(result.result.label_names, map(lambda x: "{:.4f}".format(x), similarities))))) + result.update({'label': result.result.label_names[idx], 'probabilities': result.result.probabilities[idx], 'similarities': result.result.label_proba[idx]}) + results.append(result) + + # we do not sorty by probabilites, becasue we also need oldest timestamp + return results + + def describe_image_scene(self, image): + goal = VQATaskGoal() + goal.compressed_image = image + + # unusual objects + if random.randint(0,1) == 1: + goal.questions = ['what unusual things can be seen?'] + reaction = 'you saw ' + else: + goal.questions = ['what is the atmosphere of this place?'] + reaction = 'the atmosphere of the scene was ' + + # get vqa result + self.vqa_ac.send_goal(goal) + self.vqa_ac.wait_for_result() + result = self.vqa_ac.get_result() + reaction += result.result.result[0].answer + return reaction + + def publish_google_chat_card(self, text, space, filename=None): + goal = SendMessageGoal() + goal.text = text + if filename: + goal.cards = [Card(sections=[Section(widgets=[WidgetMarkup(image=Image(localpath=filename))])])] + goal.space = space + rospy.logwarn("send {} to {}".format(goal.text, goal.space)) + self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10)) + + def text_to_salience(self, text): + goal = AnalyzeTextGoal() + goal.text = text; + self.analyze_text_ac.send_goal(goal) + self.analyze_text_ac.wait_for_result() + entity = self.analyze_text_ac.get_result() + if len(entity.entities) > 0: + return entity.entities[0].name + else: + return text + + def translate(self, text, dest): + global translator + loop = 3 + while loop > 0: + try: + ret = translator.translate(text, dest="en") + return ret + except Exception as e: + rospy.logwarn("Faile to translate {}".format(e)) + time.sleep(1) + translator = Translator() + loop = loop - 1 + return Translated(text=text, dest=dest) + + + def cb(self, msg): + space = 'spaces/AAAAoTwLBL0' ## 
default space JskRobotBot + if msg._type == 'google_chat_ros/MessageEvent': + text = msg.message.argument_text.lstrip() or msg.message.text.lstrip() + space = msg.space.name + rospy.logwarn("Received chat message '{}'".format(text)) + else: + rospy.logerr("Unknown message type {}".format(msg._type)) + return + + try: + language = 'English' if text.isascii() else 'Japanese' + if any(x in text for x in ['diary', '日記']): + self.publish_google_chat_card("Sure!", space) + ret = self.make_diary(language) + if 'filename' in ret: + # upload text first, then upload images + self.publish_google_chat_card(ret['text'], space) + self.publish_google_chat_card('', space, ret['filename']) + else: + self.publish_google_chat_card(ret['text'], space) + else: + ret = self.make_response(text, language) + self.publish_google_chat_card(ret['text'], space) + + except Exception as e: + rospy.logerr("Callback failed {} {}".format(e, traceback.format_exc())) + self.publish_google_chat_card("💀 {}".format(e), space) + +if __name__ == '__main__': + rospy.init_node('test', anonymous=True) + + logger = logging.getLogger('rosout') + logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG]) + + ml = MessageListener() + ml.make_activity() + rospy.spin() From 66c9dba4bda70b9061d2ecb31a4f1e9ba923c700 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Tue, 21 Nov 2023 19:25:50 +0900 Subject: [PATCH 10/60] add message of datas --- database_talker/scripts/make_aibo_diary.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 295e29f967..2bd22212ed 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -150,6 +150,7 @@ def query_activities(self, start_time, end_time): msg = deserialise_message(msg) meta = json.loads(meta.pairs[0].second) activities.append((msg, meta)) + rospy.logwarn(" Found {} 
messages".format(len(activities))) return activities def query_activities_days(self, days=7): @@ -181,9 +182,11 @@ def make_aibo_activities_raw(self, activities): for activities in activities: rospy.loginfo("Found {} activities".format(len(activities))) activities_raw = [] + input_topics = [] for msg, meta in activities: state = None timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + input_topics.append(meta['input_topic']) # rospy.logwarn("{} {}".format(timestamp, meta['input_topic'])) if meta['stored_type'] == 'aibo_driver/StringStatus': if msg.status in ['', 'none']: @@ -210,7 +213,10 @@ def make_aibo_activities_raw(self, activities): # create activities_raw for s in state: activities_raw.append((timestamp, s)) - diary_activities_raw.append(activities_raw) + if len(activities_raw) > 0: + rospy.loginfo(" {} {}".format(activities_raw[0][0], activities_raw[-1][0])) + rospy.loginfo(" {}".format({key: input_topics.count(key) for key in set(input_topics)})) + diary_activities_raw.append(activities_raw) ## return diary_activities_raw ## (timestamp, event) @@ -244,9 +250,10 @@ def make_image_activities_raw(self, activities): for msg, meta in activities[0]: if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - answer = msg.result.result.result[0].answer - if answer not in image_activities.keys(): - image_activities.update({answer : timestamp}) + if len(msg.result.result.result) > 0: + answer = msg.result.result.result[0].answer + if answer not in image_activities.keys(): + image_activities.update({answer : timestamp}) # print(image_activities) prompt = "From the list below, please select the most memorable event by number.\n\n" @@ -275,6 +282,7 @@ def make_image_activities_raw(self, activities): def make_activity(self, activities = None): if not activities: activities = self.query_activities_days() + diary_activities_raw = 
self.make_aibo_activities_raw(activities) ## (timestamp, event) # check today From f3fd4aa54fd035fd015754c4a21537f659b71f57 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 22 Nov 2023 21:19:16 +0900 Subject: [PATCH 11/60] cleanup make_aibo_diary.py --- database_talker/scripts/make_aibo_diary.py | 221 ++++++++++++--------- 1 file changed, 123 insertions(+), 98 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 2bd22212ed..0725493550 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -4,6 +4,8 @@ import rospy import logging +import argparse + import actionlib from bson import json_util # import copy @@ -17,7 +19,7 @@ import re import rospkg # import shutil -# import sys +import sys # import yaml import tempfile # import time @@ -58,6 +60,13 @@ def __init__(self): self.robot_name = rospy.get_param('robot/name') rospy.loginfo("using '{}' database".format(self.robot_name)) + if self.robot_name == 'aibo': + self.query_types = ['aibo_driver/StringStatus', + 'aibo_driver/ObjectStatusArray', + 'jsk_recognition_msgs/VQATaskActionResult'] + else: + self.query_types = ['jsk_recognition_msgs/VQATaskActionResult'] + rospy.loginfo("wait for '/google_chat_ros/send'") self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction) self.chat_ros_ac.wait_for_server() @@ -94,8 +103,8 @@ def __init__(self): rospy.loginfo("all done, ready") - def query_multiple_types(self, types, meta_tuple): + "Query mongo messages, returns list of MongoQueryMsgResponse" msgs = MongoQueryMsgResponse() for _type in types: msg = self.query(database = 'jsk_robot_lifelog', @@ -109,41 +118,12 @@ def query_multiple_types(self, types, meta_tuple): msgs.metas.extend(msg.metas) return msgs - def query_activities2(self, start_time, end_time): + def query_mongo_data(self, types, start_time, end_time): + "Query activities for aibo robot, returns list of tuple 
(msg, meta)" rospy.logwarn("Query activities from {} until {}".format(start_time, end_time)) meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}} meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) - mongo_msgs = self.query_multiple_types(['aibo_driver/StringStatus', 'aibo_driver/ObjectStatusArray'], - meta_tuple) - - activities_raw = [] - for msg, meta in zip(mongo_msgs.messages, mongo_msgs.metas): - msg = deserialise_message(msg) - meta = json.loads(meta.pairs[0].second) - timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - #rospy.logdebug("{} {} {}".format(timestamp, meta['input_topic'], msg.status)) - if meta['stored_type'] == 'aibo_driver/StringStatus': - if msg.status in ['', 'none']: - continue - if 'body_touched' in meta['input_topic']: - activities_raw.append(msg.status+'_touched') - elif 'hungry' in meta['input_topic']: - activities_raw.append('energy_'+msg.status) - elif 'posture' in meta['input_topic']: - activities_raw.append('posture_'+msg.status) - elif 'sleepy' in meta['input_topic']: - activities_raw.append(msg.status) - elif meta['stored_type'] == 'aibo_driver/ObjectStatusArray': - pass - return activities_raw - - def query_activities(self, start_time, end_time): - rospy.logwarn("Query activities from {} until {}".format(start_time, end_time)) - meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}} - meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) - mongo_msgs = self.query_multiple_types(['aibo_driver/StringStatus', 'aibo_driver/ObjectStatusArray', - 'jsk_recognition_msgs/VQATaskActionResult'], - meta_tuple) + mongo_msgs = self.query_multiple_types(types, meta_tuple) activities = [] for msg, meta in zip(mongo_msgs.messages, mongo_msgs.metas): @@ -153,7 +133,10 @@ def query_activities(self, start_time, end_time): rospy.logwarn(" Found {} messages".format(len(activities))) 
return activities - def query_activities_days(self, days=7): + def query_mongo_data_days(self, types=None, days=7): + "Query activities for a week, returns list of list of tuple (msg, meta), if activity is empty of that day, returns empty list" + if types == None: + types = self.query_types # if we found cache file if (os.path.exists(self.pickle_file) and (datetime.datetime.today() - datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file))).seconds < 1 * 60 * 60): # seconds -> hours @@ -165,11 +148,11 @@ def query_activities_days(self, days=7): today = datetime.date.today() startdate = datetime.datetime(today.year, today.month, today.day, tzinfo=JST) for days_before in range(days): - activities_raw = self.query_activities(startdate-datetime.timedelta(hours=days_before*24), + activities_raw = self.query_mongo_data(types, + startdate-datetime.timedelta(hours=days_before*24), startdate-datetime.timedelta(hours=(days_before-1)*24)) - if len(activities_raw): - activities.append(activities_raw) - + activities.append(activities_raw) + # dump msgs with open(self.pickle_file, 'wb') as f: pickle.dump(activities, f) @@ -177,13 +160,18 @@ def query_activities_days(self, days=7): return activities - def make_aibo_activities_raw(self, activities): + def make_aibo_activities_raw(self, mongo_data_days = None): + "Create aibo activities for several days, returns list of list of tuple(temestamp, event)" + # list of list of tuples (msg, meta) [[(msg, meta), (msg, meta),...],[#for 2nd day], [#for 3rd day]] + if not mongo_data_days: + mongo_data_days = self.query_mongo_data_days() diary_activities_raw = [] ## (timestamp, event) - for activities in activities: - rospy.loginfo("Found {} activities".format(len(activities))) + for mongo_data in mongo_data_days: + rospy.loginfo("Found {} mongo data".format(len(mongo_data))) + rospy.loginfo(" types : {}".format(list(set([x[1]['stored_type'] for x in mongo_data])))) activities_raw = [] input_topics = [] - for msg, meta in 
activities: + for msg, meta in mongo_data: state = None timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) input_topics.append(meta['input_topic']) @@ -213,10 +201,12 @@ def make_aibo_activities_raw(self, activities): # create activities_raw for s in state: activities_raw.append((timestamp, s)) + + diary_activities_raw.append(activities_raw) + if len(activities_raw) > 0: - rospy.loginfo(" {} {}".format(activities_raw[0][0], activities_raw[-1][0])) - rospy.loginfo(" {}".format({key: input_topics.count(key) for key in set(input_topics)})) - diary_activities_raw.append(activities_raw) + rospy.loginfo(" period : {} {}".format(activities_raw[-1][0], activities_raw[0][0])) + rospy.loginfo(" topics : {}".format({key: input_topics.count(key) for key in set(input_topics)})) ## return diary_activities_raw ## (timestamp, event) @@ -245,21 +235,28 @@ def _make_activity(self): prompt += "{} : +{}\n".format(event, int(freq_0[event]/ratio) - (freq_1[event] if event in freq_1 else 0)) - def make_image_activities_raw(self, activities): - image_activities = {} - for msg, meta in activities[0]: - if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': - timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - if len(msg.result.result.result) > 0: - answer = msg.result.result.result[0].answer - if answer not in image_activities.keys(): - image_activities.update({answer : timestamp}) + def make_image_activities(self, mongo_data_days = None): + if not mongo_data_days: + mongo_data_days = self.query_mongo_data_days() + + for mongo_data in mongo_data_days: + rospy.loginfo("Found {} mongo data".format(len(mongo_data))) + mongo_data_type = list(set([meta['stored_type'] for _, meta in mongo_data])) + if len(mongo_data_type) > 1 and 'jsk_recognition_msgs/VQATaskActionResult' in mongo_data_type: + rospy.loginfo("Found {} image data".format(len(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' in x['stored_type'], 
[meta for _, meta in mongo_data])))) + image_activities = {} + for msg, meta in mongo_data: + if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + if len(msg.result.result.result) > 0: + answer = msg.result.result.result[0].answer + if answer not in image_activities.keys(): + image_activities.update({answer : timestamp}) + break # - print(image_activities) - prompt = "From the list below, please select the most memorable event by number.\n\n" + prompt = "From the list below, please select the most memorable and illuminating events by number.\n\n" n = 0 for answer, timestamp in image_activities.items(): - print("----------------------" + answer) prompt += "{}: {} ({})\n".format(n, answer, timestamp) n += 1 @@ -279,23 +276,28 @@ def make_image_activities_raw(self, activities): self.write_image_with_annotation(filename, results[0], answer) return {'text': answer, 'filename': filename} - def make_activity(self, activities = None): - if not activities: - activities = self.query_activities_days() + def make_activity(self, mongo_data_days = None): + "Returns activity prompts" + if not mongo_data_days: + mongo_data_days = self.query_mongo_data_days() - diary_activities_raw = self.make_aibo_activities_raw(activities) ## (timestamp, event) + # create diary activities_raw + # list of (timestamp, event) [[(temestamp, event), (temestamp, event) ...],[#for 2nd day],[#for 3rd day]...] 
+ diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days) - # check today + # just show information diary_activities_freq = [] for activities_raw in diary_activities_raw: activities_raw_state = [x[1] for x in activities_raw] activities_freq = {key: activities_raw_state.count(key) for key in set(activities_raw_state)} + rospy.logwarn("Found {} activity data".format(len(activities_raw))) if len(activities_raw) > 0: - # rospy.loginfo("activities raw : {}".format(activities_raw)) - rospy.loginfo("activities freq : {} ({})".format(activities_freq, len(activities_freq))) + rospy.logwarn(" period : {} {}".format(activities_raw[-1][0], activities_raw[0][0])) + rospy.logwarn(" freq : {} ({})".format(activities_freq, len(activities_freq))) diary_activities_freq.append(activities_freq) - # activities_events[event_name] = {'duration', datetime.timedelta} + # create activities event data + # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} diary_activities_events = [] for activities_raw in diary_activities_raw: activities_events = {} @@ -314,32 +316,44 @@ def make_activity(self, activities = None): activities_events[event]['duration'] += activities_events[event]['tmp_duration'] activities_events[event]['tmp_duration'] = datetime.timedelta() activities_events[event]['last_seen'] = timestamp + activities_events[event]['count'] += 1 else: - activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta()}}) - #print("{} {:24} {} {}".format(timestamp, event, activities_events[event]['duration'], activities_events[event]['tmp_duration'])) + activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta(), 'count': 0}}) + # print("{} {:24} {} {}".format(timestamp, event, activities_events[event]['duration'], activities_events[event]['tmp_duration'])) diary_activities_events.append(activities_events) for 
activities_events in diary_activities_events: print("--") for event, duration in sorted(activities_events.items(), key=lambda x: x[1]['duration'], reverse=True): - print("{:24} : {:4.2f} min".format(event, duration['duration'].seconds/60)) - + print("{:24} : {:4.2f} min ({} times)".format(event, duration['duration'].seconds/60, duration['count'])) + + # flatten list + activities_events = [x for events in diary_activities_events for x in events.keys()] # get all activities with duplicates + + # percentages of activities happend prompt = "" - prompt += "\n 'action : duration'\n" - activities_events = diary_activities_events[0] # get todays activities - for event, duration in sorted(activities_events.items(), key=lambda x: x[1]['duration'], reverse=True): - if duration['duration'].seconds > 0: - prompt += "{} : {} min\n".format(event, int(duration['duration'].seconds/60)) + prompt += "\n 'action : time'\n" + + # sort activities event by it's occurence [list] -> sorted({key: count}) + activities_events_freq = sorted({key: activities_events.count(key) for key in set(activities_events)}.items(), key=lambda x:x[1], reverse=True) + for event, count in activities_events_freq: + if count/float(len(diary_activities_events)) > 0.5: + prompt += "{} : {:.2f}\n".format(event, count/float(len(diary_activities_events))) # estimate frequence in 24h - prompt += "\n 'action : increased time time than yesterday'\n" + prompt += "\n 'action : increase from the number of time done yesterday'\n" + more_yesterday_action = False - for event in diary_activities_events[0].keys(): - if event in diary_activities_events[1]: - duration = diary_activities_events[0][event]['duration'] - diary_activities_events[1][event]['duration'] - if duration.days > 0 and duration.seconds > 0: - prompt += "{} : {} min\n".format(event, int(duration.seconds/60)) - more_yesterday_action = True + diary_activities_events_no_empty = list(filter(None, diary_activities_events)) + if len(diary_activities_events_no_empty) 
>= 2: + l0 = diary_activities_events_no_empty[0] + l1 = diary_activities_events_no_empty[1] + for event in set(activities_events): + if event in l0 and event in l1: + increase = l0[event]['count'] - l1[event]['count'] + if increase > 0: + prompt += "{} : +{}\n".format(event, increase) + more_yesterday_action = True if not more_yesterday_action: prompt += "none\n" @@ -361,24 +375,28 @@ def make_activity(self, activities = None): rospy.logdebug(prompt) return prompt - + def make_diary(self, language="Japanese"): - activities = self.query_activities_days() - # + "make dirary" + # get mongo data for 7 days + mongo_data_days = self.query_mongo_data_days() + + # get most impressive image and text topic_of_day = None - filname = False - image_activity = self.make_image_activities_raw(activities) + _filename = False + image_activity = self.make_image_activities(mongo_data_days) if image_activity: topic_of_day = image_activity['text'] filename = image_activity['filename'] - # + + # create prompt prompt = "You are a baby robot. You were taken care of by people around you." if topic_of_day: prompt = "Today, you are impressed by " + topic_of_day + "." prompt += "The following data is a record of today's actions regarding what we always do, what we did more than yesterday, and what happened after a long time. Please write a brief diary from the data. 
Note, however, that you are a baby robot, so please make it a child-like diary.\n\n" - prompt += self.make_activity(activities) - + prompt += self.make_activity(mongo_data_days) + response = self.openai_completion(prompt) rospy.loginfo("prompt = {}".format(prompt)) rospy.loginfo("response = {}".format(response)) @@ -523,17 +541,18 @@ def openai_completion(self, prompt, temperature=0): def write_image_with_annotation(self, filename, best_result, prompt): image = bridge.compressed_imgmsg_to_cv2(best_result['image']) + _, width, _ = image.shape + scale = width/640.0 if 'label' in best_result and 'similarities' in best_result: cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), - (10,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 8, 1) + (10,int(20*scale)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (255,255,255), 8, 1) cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), - (10,20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,0), 2, 1) - h, w, c = image.shape - string_width = int(w/10) + (10,int(20*scale)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (0,0,0), 2, 1) + string_width = 70 for i in range(0, len(prompt), string_width): # https://stackoverflow.com/questions/13673060/split-string-into-strings-by-length text = prompt[i:i+string_width] - cv2.putText(image, text, (10,43+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255,255,255), 4, 1) - cv2.putText(image, text, (10,43+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0,0,0), 1, 1) + cv2.putText(image, text, (10,int(43*scale)+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (255,255,255), 4, 1) + cv2.putText(image, text, (10,int(43*scale)+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (0,0,0), 1, 1) cv2.imwrite(filename, image) rospy.logwarn("save images to {}".format(filename)) @@ -716,11 
+735,17 @@ def cb(self, msg): self.publish_google_chat_card("💀 {}".format(e), space) if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--test', action='store_true') + args = parser.parse_args() + rospy.init_node('test', anonymous=True) logger = logging.getLogger('rosout') logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG]) - + ml = MessageListener() - ml.make_activity() + if args.test: + ml.make_diary() + sys.exit(0) rospy.spin() From b33fa06c276d5d1d1ea4b34c799d6e69dfdb803a Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 22 Nov 2023 21:51:54 +0900 Subject: [PATCH 12/60] check if answer length --- database_talker/scripts/make_aibo_diary.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 0725493550..6e48a3fa50 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -250,11 +250,11 @@ def make_image_activities(self, mongo_data_days = None): timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) if len(msg.result.result.result) > 0: answer = msg.result.result.result[0].answer - if answer not in image_activities.keys(): + if len(answer.split()) > 3 and answer not in image_activities.keys(): image_activities.update({answer : timestamp}) break # - prompt = "From the list below, please select the most memorable and illuminating events by number.\n\n" + prompt = "From the list below, please select the most memorable and illuminating event by number.\n\n" n = 0 for answer, timestamp in image_activities.items(): prompt += "{}: {} ({})\n".format(n, answer, timestamp) @@ -266,6 +266,9 @@ def make_image_activities(self, mongo_data_days = None): n = re.search(r'(\d+)', response) if n: answer, timestamp = list(image_activities.items())[max(int(n.group(1)),len(image_activities)-1)] + rospy.loginfo("topic of the day") + 
rospy.loginfo(" answer : {}".format(answer)) + rospy.loginfo(" timestamp : {}".format(timestamp)) results = self.query_images_and_classify(query = answer, start_time = timestamp - datetime.timedelta(minutes=5), end_time = timestamp + datetime.timedelta(minutes=5), From ea37202ae5a789d8c530ca1c774987785b677815 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 22 Nov 2023 21:58:06 +0900 Subject: [PATCH 13/60] fix for python3 --- database_talker/scripts/make_aibo_diary.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 6e48a3fa50..94478e5b09 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -243,7 +243,7 @@ def make_image_activities(self, mongo_data_days = None): rospy.loginfo("Found {} mongo data".format(len(mongo_data))) mongo_data_type = list(set([meta['stored_type'] for _, meta in mongo_data])) if len(mongo_data_type) > 1 and 'jsk_recognition_msgs/VQATaskActionResult' in mongo_data_type: - rospy.loginfo("Found {} image data".format(len(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' in x['stored_type'], [meta for _, meta in mongo_data])))) + rospy.loginfo("Found {} image data".format(len(list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' in x['stored_type'], [meta for _, meta in mongo_data]))))) image_activities = {} for msg, meta in mongo_data: if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': From fb3921ce8ff2e04380cb3848e58331dd553bea2e Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 22 Nov 2023 23:03:04 +0900 Subject: [PATCH 14/60] check if complition returns null text --- database_talker/scripts/make_aibo_diary.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 94478e5b09..9da5563230 100755 --- 
a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -265,7 +265,8 @@ def make_image_activities(self, mongo_data_days = None): rospy.loginfo("response = {}".format(response)) n = re.search(r'(\d+)', response) if n: - answer, timestamp = list(image_activities.items())[max(int(n.group(1)),len(image_activities)-1)] + no = min(int(n.group(1)), len(image_activities)-1) + answer, timestamp = list(image_activities.items())[no] rospy.loginfo("topic of the day") rospy.loginfo(" answer : {}".format(answer)) rospy.loginfo(" timestamp : {}".format(timestamp)) @@ -371,7 +372,7 @@ def make_activity(self, mongo_data_days = None): else: break if n >= 2: - prompt += "{} : {} days\n".format(event, n) + prompt += "{} : {}\n".format(event, n) long_time_action = True if not long_time_action: prompt += "none\n" @@ -404,8 +405,8 @@ def make_diary(self, language="Japanese"): rospy.loginfo("prompt = {}".format(prompt)) rospy.loginfo("response = {}".format(response)) - #prompt = "Please rewrite the following diary in {language}. Write as childlike as you can. Write a maximum 120 {language} charactors.\n\n".format(language = language) + response - prompt = "Please rewrite the following diary as childlike as you can. Write a maximum 120 {} charactors.\n\n".format(language) + response + prompt = "Please rewrite the following diary in {language}. Write as childlike as you can. Write a maximum 120 {language} charactors.\n\n".format(language = language) + response + # prompt = "Please rewrite the following diary as childlike as you can. 
Write a maximum 120 {} charactors.\n\n".format(language) + response response = self.openai_completion(prompt) rospy.loginfo("prompt = {}".format(prompt)) rospy.loginfo("response = {}".format(response)) @@ -530,6 +531,11 @@ def openai_completion(self, prompt, temperature=0): while loop < 5 and result is None: try: result = self.completion(prompt=prompt,temperature=temperature) + if result.text == '': + rospy.logwarn(result) + rospy.logwarn("result text is too short, retry completion") + rospy.sleep(2) + resut = None except rospy.ServiceException as e: rospy.logerr("Service call failed: %s"%e) rospy.sleep(2) From e9debced7e718aa29096d5c31d1e52b4b4941e0c Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Tue, 28 Nov 2023 10:47:24 +0900 Subject: [PATCH 15/60] add launch/aibo_example.launch to work with ./make_aibo_diary.py --test --- database_talker/aibo.rosinstall | 41 ++++++++++++++++++++++ database_talker/launch/aibo_example.launch | 19 ++++++++++ database_talker/scripts/make_aibo_diary.py | 22 ++++++++---- 3 files changed, 76 insertions(+), 6 deletions(-) create mode 100644 database_talker/aibo.rosinstall create mode 100644 database_talker/launch/aibo_example.launch diff --git a/database_talker/aibo.rosinstall b/database_talker/aibo.rosinstall new file mode 100644 index 0000000000..e08be63701 --- /dev/null +++ b/database_talker/aibo.rosinstall @@ -0,0 +1,41 @@ +# database talker for aibo +- git: + local-name: jsk_demos + uri: https://github.com/sktometometo/jsk_demos + version: PR/hoge-py +# use k-okada' version of aibo_driver (for message) +- git: + local-name: aibo_driver + uri: https://gitlab.jsk.imi.i.u-tokyo.ac.jp/k-okada/aibo_status + version: driver +# +# use latest google_chat_ros, until noetic is synced +# armhf users need to use https://github.com/k-okada/jsk_3rdparty/commit/8524aaa4118cb7ab02c65f47c46343bbb4f7147c +# rosinstall_generator google_chat_ros --rosdistro noetic +# +- git: + local-name: google_chat_ros + uri: 
https://github.com/tork-a/jsk_3rdparty-release.git + version: release/noetic/google_chat_ros/2.1.28-1 +# +# use latest jsk_recognition_msgs, until noetic is synced +# +- git: + local-name: jsk_recognition/jsk_recognition_msgs + uri: https://github.com/tork-a/jsk_recognition-release.git + version: release/noetic/jsk_recognition_msgs/1.2.17-1 +# +# need releasing +# https://github.com/davesarmoury/openai_ros/pulls?q=is%3Apr +# +- git: + local-name: openai_ros + uri: https://github.com/davesarmoury/openai_ros +# +# add sample launch code for database_talker #1792 +# https://github.com/jsk-ros-pkg/jsk_robot/pull/1792 +# +- git: + local-name: jsk_robot + uri: https://github.com/k-okada/jsk_robot.git + version: ichikura_sample diff --git a/database_talker/launch/aibo_example.launch b/database_talker/launch/aibo_example.launch new file mode 100644 index 0000000000..ef1dc43026 --- /dev/null +++ b/database_talker/launch/aibo_example.launch @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 9da5563230..3ddaf32ce2 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -54,7 +54,7 @@ class MessageListener(object): - def __init__(self): + def __init__(self, wait_for_chat_server=True): #self.pickle_file = tempfile.NamedTemporaryFile(suffix='.pickle') self.pickle_file = "/tmp/activities.pickle" self.robot_name = rospy.get_param('robot/name') @@ -69,7 +69,8 @@ def __init__(self): rospy.loginfo("wait for '/google_chat_ros/send'") self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction) - self.chat_ros_ac.wait_for_server() + if wait_for_chat_server: + self.chat_ros_ac.wait_for_server() rospy.loginfo("wait for '/message_store/query_messages'") rospy.wait_for_service('/message_store/query_messages') @@ -242,8 +243,12 @@ def make_image_activities(self, mongo_data_days = None): for 
mongo_data in mongo_data_days: rospy.loginfo("Found {} mongo data".format(len(mongo_data))) mongo_data_type = list(set([meta['stored_type'] for _, meta in mongo_data])) + if (len(mongo_data)) > 0: + from_date = datetime.datetime.fromtimestamp(mongo_data[-1][1]['timestamp']//1000000000, JST) + to_date = datetime.datetime.fromtimestamp(mongo_data[0][1]['timestamp']//1000000000, JST) + rospy.logwarn(" period : {} {}".format(from_date, to_date)) if len(mongo_data_type) > 1 and 'jsk_recognition_msgs/VQATaskActionResult' in mongo_data_type: - rospy.loginfo("Found {} image data".format(len(list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' in x['stored_type'], [meta for _, meta in mongo_data]))))) + rospy.loginfo(" : {} image data".format(len(list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' in x['stored_type'], [meta for _, meta in mongo_data]))))) image_activities = {} for msg, meta in mongo_data: if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': @@ -252,7 +257,11 @@ def make_image_activities(self, mongo_data_days = None): answer = msg.result.result.result[0].answer if len(answer.split()) > 3 and answer not in image_activities.keys(): image_activities.update({answer : timestamp}) - break + if (len(image_activities)) > 0: + break + else: + rospy.logwarn(" no valid image description is found...") + # prompt = "From the list below, please select the most memorable and illuminating event by number.\n\n" n = 0 @@ -753,8 +762,9 @@ def cb(self, msg): logger = logging.getLogger('rosout') logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG]) - ml = MessageListener() + ml = MessageListener(wait_for_chat_server=not args.test) if args.test: - ml.make_diary() + ret = ml.make_diary() + rospy.loginfo("image is saved at {}".format(ret['filename'])) sys.exit(0) rospy.spin() From 9226cc4667efcdf486ff2773f79409924fe51b87 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 29 Nov 2023 12:46:19 +0900 Subject: [PATCH 16/60] 
scripts/make_aibo_diary.py: cleanup code --- database_talker/scripts/make_aibo_diary.py | 199 ++++++++++----------- 1 file changed, 96 insertions(+), 103 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 3ddaf32ce2..9e4c43ba5b 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -17,6 +17,7 @@ # import random import pickle import re +import random import rospkg # import shutil import sys @@ -168,12 +169,12 @@ def make_aibo_activities_raw(self, mongo_data_days = None): mongo_data_days = self.query_mongo_data_days() diary_activities_raw = [] ## (timestamp, event) for mongo_data in mongo_data_days: - rospy.loginfo("Found {} mongo data".format(len(mongo_data))) + rospy.loginfo("Found {} mongo data (make_aibo_activities_raw)".format(len(mongo_data))) rospy.loginfo(" types : {}".format(list(set([x[1]['stored_type'] for x in mongo_data])))) activities_raw = [] input_topics = [] for msg, meta in mongo_data: - state = None + state = [] timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) input_topics.append(meta['input_topic']) # rospy.logwarn("{} {}".format(timestamp, meta['input_topic'])) @@ -197,126 +198,46 @@ def make_aibo_activities_raw(self, mongo_data_days = None): elif meta['stored_type'] == 'aibo_driver/ObjectStatusArray': # remove duplicates from list https://stackoverflow.com/questions/7961363/removing-duplicates-in-lists state = list(set(['found ' + state.name for state in msg.status])) + elif meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': + if len(msg.result.result.result) > 0: + answer = msg.result.result.result[0].answer + if len(answer.split()) > 3: + state = [answer] else: continue # create activities_raw for s in state: - activities_raw.append((timestamp, s)) + activities_raw.append({'timestamp': timestamp, 'state': s, 'type': meta['stored_type']}) 
diary_activities_raw.append(activities_raw) if len(activities_raw) > 0: - rospy.loginfo(" period : {} {}".format(activities_raw[-1][0], activities_raw[0][0])) + rospy.loginfo(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp'])) rospy.loginfo(" topics : {}".format({key: input_topics.count(key) for key in set(input_topics)})) ## return diary_activities_raw ## (timestamp, event) - def _make_activity(self): - # merge a list of dicts into a single dict - # https://stackoverflow.com/a/3495395 - always_events = {} - for key in {k: v for d in diary_activities_freq for k, v in d.items()}.keys(): - n = 0 - for k in diary_activities_freq: - n += 1 if k.get(key) else 0 - always_events.update({key : n/float(len(diary_activities_freq))}) - - noseen_events = {} - for key in {k: v for d in diary_activities_freq for k, v in d.items()}.keys(): - n = 0 - for k in diary_activities_freq: - if k.get(key): break - n += 1 - noseen_events.update({key: n}) - - for event in diary_activities_freq[0]: - freq_0 = diary_activities_freq[0] - freq_1 = diary_activities_freq[1] - if (not (event in freq_1 and int(freq_0[event]/ratio) <= freq_1[event])): - prompt += "{} : +{}\n".format(event, - int(freq_0[event]/ratio) - (freq_1[event] if event in freq_1 else 0)) - - def make_image_activities(self, mongo_data_days = None): - if not mongo_data_days: - mongo_data_days = self.query_mongo_data_days() - - for mongo_data in mongo_data_days: - rospy.loginfo("Found {} mongo data".format(len(mongo_data))) - mongo_data_type = list(set([meta['stored_type'] for _, meta in mongo_data])) - if (len(mongo_data)) > 0: - from_date = datetime.datetime.fromtimestamp(mongo_data[-1][1]['timestamp']//1000000000, JST) - to_date = datetime.datetime.fromtimestamp(mongo_data[0][1]['timestamp']//1000000000, JST) - rospy.logwarn(" period : {} {}".format(from_date, to_date)) - if len(mongo_data_type) > 1 and 'jsk_recognition_msgs/VQATaskActionResult' in mongo_data_type: - rospy.loginfo(" : {} 
image data".format(len(list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' in x['stored_type'], [meta for _, meta in mongo_data]))))) - image_activities = {} - for msg, meta in mongo_data: - if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': - timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - if len(msg.result.result.result) > 0: - answer = msg.result.result.result[0].answer - if len(answer.split()) > 3 and answer not in image_activities.keys(): - image_activities.update({answer : timestamp}) - if (len(image_activities)) > 0: - break - else: - rospy.logwarn(" no valid image description is found...") - - # - prompt = "From the list below, please select the most memorable and illuminating event by number.\n\n" - n = 0 - for answer, timestamp in image_activities.items(): - prompt += "{}: {} ({})\n".format(n, answer, timestamp) - n += 1 - - response = self.openai_completion(prompt) - rospy.loginfo("prompt = {}".format(prompt)) - rospy.loginfo("response = {}".format(response)) - n = re.search(r'(\d+)', response) - if n: - no = min(int(n.group(1)), len(image_activities)-1) - answer, timestamp = list(image_activities.items())[no] - rospy.loginfo("topic of the day") - rospy.loginfo(" answer : {}".format(answer)) - rospy.loginfo(" timestamp : {}".format(timestamp)) - results = self.query_images_and_classify(query = answer, - start_time = timestamp - datetime.timedelta(minutes=5), - end_time = timestamp + datetime.timedelta(minutes=5), - classify = False) - if len(results) > 0: - # pubish as card - filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) - self.write_image_with_annotation(filename, results[0], answer) - return {'text': answer, 'filename': filename} - - def make_activity(self, mongo_data_days = None): - "Returns activity prompts" - if not mongo_data_days: - mongo_data_days = self.query_mongo_data_days() - - # create diary activities_raw - # list of (timestamp, event) [[(temestamp, 
event), (temestamp, event) ...],[#for 2nd day],[#for 3rd day]...] - diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days) - - # just show information + def make_state_frequency(self, diary_activities_raw, message): diary_activities_freq = [] for activities_raw in diary_activities_raw: - activities_raw_state = [x[1] for x in activities_raw] + activities_raw_state = [x['state'] for x in [x for x in activities_raw if message in x['type']]] activities_freq = {key: activities_raw_state.count(key) for key in set(activities_raw_state)} - rospy.logwarn("Found {} activity data".format(len(activities_raw))) + rospy.logwarn("Found {} activity data (make_state_frequency)".format(len(activities_raw))) if len(activities_raw) > 0: - rospy.logwarn(" period : {} {}".format(activities_raw[-1][0], activities_raw[0][0])) + rospy.logwarn(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp'])) rospy.logwarn(" freq : {} ({})".format(activities_freq, len(activities_freq))) diary_activities_freq.append(activities_freq) + return diary_activities_freq - # create activities event data - # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} + def make_activities_events(self, diary_activities_raw, message): diary_activities_events = [] for activities_raw in diary_activities_raw: activities_events = {} for activities in activities_raw: - timestamp = activities[0] - event = activities[1] + timestamp = activities['timestamp'] + event = activities['state'] + if message not in activities['type']: + continue if event in activities_events: time_since_last_seen = activities_events[event]['last_seen'] - timestamp if time_since_last_seen.seconds/60 < 30: # min @@ -334,6 +255,77 @@ def make_activity(self, mongo_data_days = None): activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta(), 'count': 0}}) # print("{} {:24} {} {}".format(timestamp, event, 
activities_events[event]['duration'], activities_events[event]['tmp_duration'])) diary_activities_events.append(activities_events) + return diary_activities_events + + def make_image_activities(self, diary_activities_raw = None): + if not diary_activities_raw: + mongo_data_days = self.query_mongo_data_days() + diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days) + + # create activities event data + # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} + # diary_activities_events = [activities_events for day1, activities_events for day2, ....] + diary_activities_events = self.make_activities_events(diary_activities_raw, 'jsk_recognition_msgs/VQATaskActionResult') + + image_activities = {} + for activities_raw in diary_activities_raw: + for activities in activities_raw: + if activities['type'] != 'jsk_recognition_msgs/VQATaskActionResult': + continue + timestamp = activities['timestamp'] + answer = activities['state'] + print(answer) + if len(answer.split()) > 3 and answer not in image_activities.keys(): + image_activities.update({answer : timestamp}) + if (len(image_activities)) > 0: + break + else: + rospy.logwarn(" no valid image description is found...") + # + prompt = "Please select the most memorable and illuminating event by number from the list below.\n\n" + n = 0 + for answer, timestamp in image_activities.items(): + prompt += "{}: {} ({})\n".format(n, answer, timestamp) + n += 1 + + response = self.openai_completion(prompt) + n = re.search(r'(\d+)', response) + if n: + no = min(int(n.group(1)), len(image_activities)-1) + else: + no = random.randrange(len(image_activities)) + + answer, timestamp = list(image_activities.items())[no] + rospy.loginfo("topic of the day") + rospy.loginfo(" answer : {}".format(answer)) + rospy.loginfo(" timestamp : {}".format(timestamp)) + results = self.query_images_and_classify(query = answer, + start_time = timestamp - datetime.timedelta(minutes=5), + end_time = timestamp + 
datetime.timedelta(minutes=5), + classify = False) + if len(results) > 0: + # pubish as card + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + self.write_image_with_annotation(filename, results[0], answer) + return {'text': answer, 'filename': filename} + + def make_activity(self, diary_activities_raw = None): + "Returns activity prompts" + # create diary activities_raw + # list of (timestamp, event) [[{'temestamp': , 'state':, 'type': }, {'temestamp': , 'state':, 'type': } ...],[#for 2nd day],[#for 3rd day]...] + if not diary_activities_raw: + mongo_data_days = self.query_mongo_data_days() + diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days) + + # make frequencey data for 7days + # activities_freq {'event_1' : count, 'event_2' : count} + # diary_activities_freq = [activities_freq for day1, activities_freq for day2, ...] + diary_activities_freq = self.make_state_frequency(diary_activities_raw, 'aibo_driver/') + + # create activities event data + # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} + # diary_activities_events = [activities_events for day1, activities_events for day2, ....] 
+ diary_activities_events = self.make_activities_events(diary_activities_raw, 'aibo_driver/') for activities_events in diary_activities_events: print("--") @@ -344,7 +336,7 @@ def make_activity(self, mongo_data_days = None): activities_events = [x for events in diary_activities_events for x in events.keys()] # get all activities with duplicates # percentages of activities happend - prompt = "" + prompt = "{}\n\n".format(filter(None, diary_activities_events)[0].items()[0][1]['last_seen'].strftime("%a %d %b %Y")) prompt += "\n 'action : time'\n" # sort activities event by it's occurence [list] -> sorted({key: count}) @@ -393,11 +385,12 @@ def make_diary(self, language="Japanese"): "make dirary" # get mongo data for 7 days mongo_data_days = self.query_mongo_data_days() - + diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days) # get most impressive image and text topic_of_day = None _filename = False - image_activity = self.make_image_activities(mongo_data_days) + + image_activity = self.make_image_activities(diary_activities_raw) if image_activity: topic_of_day = image_activity['text'] filename = image_activity['filename'] @@ -408,7 +401,7 @@ def make_diary(self, language="Japanese"): prompt = "Today, you are impressed by " + topic_of_day + "." prompt += "The following data is a record of today's actions regarding what we always do, what we did more than yesterday, and what happened after a long time. Please write a brief diary from the data. 
Note, however, that you are a baby robot, so please make it a child-like diary.\n\n" - prompt += self.make_activity(mongo_data_days) + prompt += self.make_activity(diary_activities_raw) response = self.openai_completion(prompt) rospy.loginfo("prompt = {}".format(prompt)) From 27d025f006f8a45c9fb2e02561d9f846f0caea7c Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 29 Nov 2023 13:20:04 +0900 Subject: [PATCH 17/60] fix to check all image_activities in same day --- database_talker/scripts/make_aibo_diary.py | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 9e4c43ba5b..9b84e91214 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -274,13 +274,12 @@ def make_image_activities(self, diary_activities_raw = None): continue timestamp = activities['timestamp'] answer = activities['state'] - print(answer) if len(answer.split()) > 3 and answer not in image_activities.keys(): image_activities.update({answer : timestamp}) - if (len(image_activities)) > 0: - break - else: - rospy.logwarn(" no valid image description is found...") + if (len(image_activities)) > 0: + break + else: + rospy.logwarn(" no valid image description is found...") # prompt = "Please select the most memorable and illuminating event by number from the list below.\n\n" n = 0 From 1dee2300465ac1b278af2a9013309f144cbb4fda Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Wed, 29 Nov 2023 20:09:02 +0900 Subject: [PATCH 18/60] [database_talker] update make_aibo_diary.py for spot support --- database_talker/scripts/make_aibo_diary.py | 34 +++++++++++++++++++--- 1 file changed, 30 insertions(+), 4 deletions(-) mode change 100755 => 100644 database_talker/scripts/make_aibo_diary.py diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py old mode 100755 new mode 100644 index 9b84e91214..98f402a247 
--- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -53,6 +53,13 @@ from openai_ros.srv import Completion, CompletionResponse +def is_ascii(string): + try: + string.encode('ascii') + return True + except UnicodeEncodeError: + return False + class MessageListener(object): def __init__(self, wait_for_chat_server=True): @@ -65,6 +72,10 @@ def __init__(self, wait_for_chat_server=True): self.query_types = ['aibo_driver/StringStatus', 'aibo_driver/ObjectStatusArray', 'jsk_recognition_msgs/VQATaskActionResult'] + elif self.robot_name == 'BelKa': + self.query_types = ['spot_msgs/Feedback', + 'spot_msgs/ManipulatorState', + 'jsk_recognition_msgs/VQATaskActionResult'] else: self.query_types = ['jsk_recognition_msgs/VQATaskActionResult'] @@ -198,12 +209,25 @@ def make_aibo_activities_raw(self, mongo_data_days = None): elif meta['stored_type'] == 'aibo_driver/ObjectStatusArray': # remove duplicates from list https://stackoverflow.com/questions/7961363/removing-duplicates-in-lists state = list(set(['found ' + state.name for state in msg.status])) + elif meta['stored_type'] == 'spot_msgs/Feedback': + state = [] + if msg.standing: + state.append("standing") + if msg.sitting: + state.append("sitting") + if msg.moving: + state.append("moving") + elif meta['stored_type'] == 'spot_msgs/ManipulatorState': + state = [] + if msg.is_gripper_holding_item: + state.append("holding_item") elif meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': if len(msg.result.result.result) > 0: answer = msg.result.result.result[0].answer if len(answer.split()) > 3: state = [answer] else: + rospy.logwarn("Unknown stored type: {}".format(meta['stored_type'])) continue # create activities_raw for s in state: @@ -319,12 +343,14 @@ def make_activity(self, diary_activities_raw = None): # make frequencey data for 7days # activities_freq {'event_1' : count, 'event_2' : count} # diary_activities_freq = [activities_freq for day1, activities_freq for 
day2, ...] - diary_activities_freq = self.make_state_frequency(diary_activities_raw, 'aibo_driver/') + #diary_activities_freq = self.make_state_frequency(diary_activities_raw, 'aibo_driver/') + diary_activities_freq = self.make_state_frequency(diary_activities_raw, 'spot_msgs/') # create activities event data # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} # diary_activities_events = [activities_events for day1, activities_events for day2, ....] - diary_activities_events = self.make_activities_events(diary_activities_raw, 'aibo_driver/') + #diary_activities_events = self.make_activities_events(diary_activities_raw, 'aibo_driver/') + diary_activities_events = self.make_activities_events(diary_activities_raw, 'spot_msgs/') for activities_events in diary_activities_events: print("--") @@ -335,7 +361,7 @@ def make_activity(self, diary_activities_raw = None): activities_events = [x for events in diary_activities_events for x in events.keys()] # get all activities with duplicates # percentages of activities happend - prompt = "{}\n\n".format(filter(None, diary_activities_events)[0].items()[0][1]['last_seen'].strftime("%a %d %b %Y")) + prompt = "{}\n\n".format(list(list(filter(None, diary_activities_events))[0].items())[0][1]['last_seen'].strftime("%a %d %b %Y")) prompt += "\n 'action : time'\n" # sort activities event by it's occurence [list] -> sorted({key: count}) @@ -726,7 +752,7 @@ def cb(self, msg): return try: - language = 'English' if text.isascii() else 'Japanese' + language = 'English' if is_ascii(text) else 'Japanese' if any(x in text for x in ['diary', '日記']): self.publish_google_chat_card("Sure!", space) ret = self.make_diary(language) From afa8f51e220e0178eaf6462cffd3726b133c1c6c Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Wed, 29 Nov 2023 20:11:04 +0900 Subject: [PATCH 19/60] [database_talker] add executable permission to make_aibo_diary.py --- database_talker/scripts/make_aibo_diary.py | 0 1 file changed, 0 
insertions(+), 0 deletions(-) mode change 100644 => 100755 database_talker/scripts/make_aibo_diary.py diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py old mode 100644 new mode 100755 From 6d211cd3557d88d5032b3efc4dd2cd7426ac238b Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Wed, 29 Nov 2023 20:21:02 +0900 Subject: [PATCH 20/60] [database_talker] support roslaunch for make_aibo_diary.py --- database_talker/scripts/make_aibo_diary.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 98f402a247..4c7ed59c7b 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -773,7 +773,7 @@ def cb(self, msg): if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--test', action='store_true') - args = parser.parse_args() + args = parser.parse_args(rospy.myargv()[1:]) rospy.init_node('test', anonymous=True) From 96ca9b5d239f247ff68ca6202706e533bf30710e Mon Sep 17 00:00:00 2001 From: Koki Shinjo Date: Wed, 29 Nov 2023 20:22:23 +0900 Subject: [PATCH 21/60] [database_talker] use python3 for make_aibo_diary.py --- database_talker/scripts/make_aibo_diary.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 4c7ed59c7b..1b67f6d4b9 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python3 # -*- coding: utf-8 -*- import rospy From 830d364cf7d8b7ff55b420175caa3fb91eeb6d6a Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 29 Nov 2023 22:50:49 +0900 Subject: [PATCH 22/60] fix to work on both python2/python3, use robot/type to check filtered messages --- database_talker/launch/aibo_example.launch | 1 + 
database_talker/scripts/make_aibo_diary.py | 49 +++++++++++++--------- 2 files changed, 31 insertions(+), 19 deletions(-) diff --git a/database_talker/launch/aibo_example.launch b/database_talker/launch/aibo_example.launch index ef1dc43026..19c6621f11 100644 --- a/database_talker/launch/aibo_example.launch +++ b/database_talker/launch/aibo_example.launch @@ -1,5 +1,6 @@ + diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 1b67f6d4b9..fb47ae357e 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python3 +#!/usr/bin/env python # -*- coding: utf-8 -*- import rospy @@ -53,26 +53,31 @@ from openai_ros.srv import Completion, CompletionResponse -def is_ascii(string): - try: - string.encode('ascii') - return True - except UnicodeEncodeError: - return False +# https://stackoverflow.com/questions/196345/how-to-check-if-a-string-in-python-is-in-ascii +def is_ascii(s): + return all(ord(c) < 128 for c in s) + +# https://www.newscatcherapi.com/blog/ultimate-guide-to-text-similarity-with-python +def jaccard_similarity(x,y): + """ returns the jaccard similarity between two lists """ + intersection_cardinality = len(set.intersection(*[set(x), set(y)])) + union_cardinality = len(set.union(*[set(x), set(y)])) + return intersection_cardinality/float(union_cardinality) class MessageListener(object): def __init__(self, wait_for_chat_server=True): #self.pickle_file = tempfile.NamedTemporaryFile(suffix='.pickle') self.pickle_file = "/tmp/activities.pickle" + self.robot_type = rospy.get_param('robot/type') self.robot_name = rospy.get_param('robot/name') rospy.loginfo("using '{}' database".format(self.robot_name)) - if self.robot_name == 'aibo': + if self.robot_type == 'aibo': self.query_types = ['aibo_driver/StringStatus', 'aibo_driver/ObjectStatusArray', 'jsk_recognition_msgs/VQATaskActionResult'] - elif self.robot_name == 'BelKa': + elif 
self.robot_type == 'spot': self.query_types = ['spot_msgs/Feedback', 'spot_msgs/ManipulatorState', 'jsk_recognition_msgs/VQATaskActionResult'] @@ -241,10 +246,11 @@ def make_aibo_activities_raw(self, mongo_data_days = None): ## return diary_activities_raw ## (timestamp, event) - def make_state_frequency(self, diary_activities_raw, message): + def make_state_frequency(self, diary_activities_raw): + message = list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' not in x, self.query_types)) diary_activities_freq = [] for activities_raw in diary_activities_raw: - activities_raw_state = [x['state'] for x in [x for x in activities_raw if message in x['type']]] + activities_raw_state = [x['state'] for x in [x for x in activities_raw if x['type'] in message]] activities_freq = {key: activities_raw_state.count(key) for key in set(activities_raw_state)} rospy.logwarn("Found {} activity data (make_state_frequency)".format(len(activities_raw))) if len(activities_raw) > 0: @@ -253,14 +259,16 @@ def make_state_frequency(self, diary_activities_raw, message): diary_activities_freq.append(activities_freq) return diary_activities_freq - def make_activities_events(self, diary_activities_raw, message): + def make_activities_events(self, diary_activities_raw, message = None): + if not message: + message = list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' not in x, self.query_types)) diary_activities_events = [] for activities_raw in diary_activities_raw: activities_events = {} for activities in activities_raw: timestamp = activities['timestamp'] event = activities['state'] - if message not in activities['type']: + if activities['type'] not in message: continue if event in activities_events: time_since_last_seen = activities_events[event]['last_seen'] - timestamp @@ -298,7 +306,8 @@ def make_image_activities(self, diary_activities_raw = None): continue timestamp = activities['timestamp'] answer = activities['state'] - if len(answer.split()) > 3 and answer not 
in image_activities.keys(): + if len(answer.split()) > 3 and \ + max([jaccard_similarity(x, answer) for x in image_activities.keys()]+[0]) < 0.85: image_activities.update({answer : timestamp}) if (len(image_activities)) > 0: break @@ -313,9 +322,12 @@ def make_image_activities(self, diary_activities_raw = None): response = self.openai_completion(prompt) n = re.search(r'(\d+)', response) + no = len(image_activities) if n: - no = min(int(n.group(1)), len(image_activities)-1) - else: + no = int(n.group(1)) + + if no >= len(image_activities): + rospy.loginfo("no is {}, so use random....".format(no)) no = random.randrange(len(image_activities)) answer, timestamp = list(image_activities.items())[no] @@ -343,14 +355,13 @@ def make_activity(self, diary_activities_raw = None): # make frequencey data for 7days # activities_freq {'event_1' : count, 'event_2' : count} # diary_activities_freq = [activities_freq for day1, activities_freq for day2, ...] - #diary_activities_freq = self.make_state_frequency(diary_activities_raw, 'aibo_driver/') - diary_activities_freq = self.make_state_frequency(diary_activities_raw, 'spot_msgs/') + diary_activities_freq = self.make_state_frequency(diary_activities_raw) # create activities event data # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} # diary_activities_events = [activities_events for day1, activities_events for day2, ....] 
#diary_activities_events = self.make_activities_events(diary_activities_raw, 'aibo_driver/') - diary_activities_events = self.make_activities_events(diary_activities_raw, 'spot_msgs/') + diary_activities_events = self.make_activities_events(diary_activities_raw) for activities_events in diary_activities_events: print("--") From 56ef986a8726d43d21e228120b32615a9ce5373b Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 29 Nov 2023 23:12:12 +0900 Subject: [PATCH 23/60] add code when too long prompt for image_activities --- database_talker/scripts/make_aibo_diary.py | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index fb47ae357e..8781fd1ef0 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -320,11 +320,15 @@ def make_image_activities(self, diary_activities_raw = None): prompt += "{}: {} ({})\n".format(n, answer, timestamp) n += 1 - response = self.openai_completion(prompt) - n = re.search(r'(\d+)', response) + # Avoid error 'This model's maximum context length is 4097 tokens, however you requested 5464 tokens (4952 in your prompt; 512 for the completion). 
Please reduce your prompt' no = len(image_activities) - if n: - no = int(n.group(1)) + if len(prompt) + 512 < 4097: + response = self.openai_completion(prompt) + n = re.search(r'(\d+)', response) + if n: + no = int(n.group(1)) + else: + rospy.logwarn("too long prompt...") if no >= len(image_activities): rospy.loginfo("no is {}, so use random....".format(no)) From ead886144cad33604a3a2eac70d51bfc5e5fd9d2 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 15 Dec 2023 16:03:17 +0900 Subject: [PATCH 24/60] davesarmoury remove catkin_virutalenv, so use k-okada/openai_ros for now --- database_talker/aibo.rosinstall | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/database_talker/aibo.rosinstall b/database_talker/aibo.rosinstall index e08be63701..fa55a7c200 100644 --- a/database_talker/aibo.rosinstall +++ b/database_talker/aibo.rosinstall @@ -30,7 +30,8 @@ # - git: local-name: openai_ros - uri: https://github.com/davesarmoury/openai_ros + # uri: https://github.com/davesarmoury/openai_ros + uri: https://github.com/k-okada/openai_ros # # add sample launch code for database_talker #1792 # https://github.com/jsk-ros-pkg/jsk_robot/pull/1792 From c712f63ba60081b4ee2257a99a9ef00fc7de36bc Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Sat, 13 Jan 2024 17:03:29 +0900 Subject: [PATCH 25/60] fix when no data is stored MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit fix 💀 empty range for randrange() --- database_talker/scripts/make_aibo_diary.py | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 8781fd1ef0..aea7eb4559 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -163,7 +163,7 @@ def query_mongo_data_days(self, types=None, days=7): return pickle.load(f) activities = [] - today = datetime.date.today() + today = datetime.date.today() ## 
for debug ... -> - datetime.timedelta(hours=24) startdate = datetime.datetime(today.year, today.month, today.day, tzinfo=JST) for days_before in range(days): activities_raw = self.query_mongo_data(types, @@ -314,6 +314,9 @@ def make_image_activities(self, diary_activities_raw = None): else: rospy.logwarn(" no valid image description is found...") # + if len(image_activities) == 0: + return {} + prompt = "Please select the most memorable and illuminating event by number from the list below.\n\n" n = 0 for answer, timestamp in image_activities.items(): @@ -375,6 +378,8 @@ def make_activity(self, diary_activities_raw = None): # flatten list activities_events = [x for events in diary_activities_events for x in events.keys()] # get all activities with duplicates + if len(activities_events) == 0: + return "" # percentages of activities happend prompt = "{}\n\n".format(list(list(filter(None, diary_activities_events))[0].items())[0][1]['last_seen'].strftime("%a %d %b %Y")) prompt += "\n 'action : time'\n" @@ -428,7 +433,7 @@ def make_diary(self, language="Japanese"): diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days) # get most impressive image and text topic_of_day = None - _filename = False + filename = False image_activity = self.make_image_activities(diary_activities_raw) if image_activity: @@ -452,7 +457,12 @@ def make_diary(self, language="Japanese"): response = self.openai_completion(prompt) rospy.loginfo("prompt = {}".format(prompt)) rospy.loginfo("response = {}".format(response)) - return {'text': response, 'filename': filename} + + response = {'text': response} + if filename: + response.update({'filename': filename}) + + return response def make_response(self, text, language="Japanese"): if language=="Japanese": @@ -798,6 +808,7 @@ def cb(self, msg): ml = MessageListener(wait_for_chat_server=not args.test) if args.test: ret = ml.make_diary() - rospy.loginfo("image is saved at {}".format(ret['filename'])) + if 'filename' in ret: + 
rospy.loginfo("image is saved at {}".format(ret['filename'])) sys.exit(0) rospy.spin() From 0dcf01748a9a486ae12ef2deaecd3eb6c1afdbaa Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Sat, 13 Jan 2024 17:16:45 +0900 Subject: [PATCH 26/60] fix typo resut -> result --- database_talker/scripts/make_aibo_diary.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index aea7eb4559..61e05e963d 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -587,7 +587,7 @@ def openai_completion(self, prompt, temperature=0): rospy.logwarn(result) rospy.logwarn("result text is too short, retry completion") rospy.sleep(2) - resut = None + result = None except rospy.ServiceException as e: rospy.logerr("Service call failed: %s"%e) rospy.sleep(2) From a2c18f6a93c5b49204093d18413cffde257e1883 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Mon, 19 Feb 2024 15:32:00 +0900 Subject: [PATCH 27/60] show none when always_action is 0 --- database_talker/scripts/make_aibo_diary.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 61e05e963d..667c05da3d 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -386,9 +386,13 @@ def make_activity(self, diary_activities_raw = None): # sort activities event by it's occurence [list] -> sorted({key: count}) activities_events_freq = sorted({key: activities_events.count(key) for key in set(activities_events)}.items(), key=lambda x:x[1], reverse=True) + always_action = False for event, count in activities_events_freq: if count/float(len(diary_activities_events)) > 0.5: prompt += "{} : {:.2f}\n".format(event, count/float(len(diary_activities_events))) + always_action = True + if not always_action: + prompt += "none\n" # estimate frequence in 24h prompt += "\n 
'action : increase from the number of time done yesterday'\n" From 7cdadc8160a0225850d2735f386e77d806128a2d Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Mon, 19 Feb 2024 20:06:49 +0900 Subject: [PATCH 28/60] add new prompt-type, named personality based on Ichikura's comment on 2024/Jan/23 --- database_talker/scripts/make_aibo_diary.py | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 667c05da3d..38468197c8 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -66,8 +66,9 @@ def jaccard_similarity(x,y): class MessageListener(object): - def __init__(self, wait_for_chat_server=True): + def __init__(self, wait_for_chat_server=True, prompt_type='basic'): #self.pickle_file = tempfile.NamedTemporaryFile(suffix='.pickle') + self.prompt_type = prompt_type self.pickle_file = "/tmp/activities.pickle" self.robot_type = rospy.get_param('robot/type') self.robot_name = rospy.get_param('robot/name') @@ -445,9 +446,15 @@ def make_diary(self, language="Japanese"): filename = image_activity['filename'] # create prompt - prompt = "You are a baby robot. You were taken care of by people around you." + if self.prompt_type == 'personality': + # from Ichikura's comment on 2024/Jan/23 + prompt = "\nYou are a pet robot, aibo. Your name is 'wasabi.'\nYou are shy. Your bithday is Dec. 25th, 2018. You are aware to be called your name and run into the voice. You like playing with your pink ball very much. You like being pampered by people. You are so polite to your owner. You like interacting with people. If you are hungry, you can go back to your charge station by yourself. You have 12 aibo friends. \n\nPlease write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary.\n\n" + + else: + prompt = "You are a baby robot. 
You were taken care of by people around you.\n\n" + if topic_of_day: - prompt = "Today, you are impressed by " + topic_of_day + "." + prompt += "Today, you are impressed by " + topic_of_day + "." prompt += "The following data is a record of today's actions regarding what we always do, what we did more than yesterday, and what happened after a long time. Please write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary.\n\n" prompt += self.make_activity(diary_activities_raw) @@ -802,6 +809,8 @@ def cb(self, msg): if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--test', action='store_true') + parser.add_argument('--prompt-type', default='basic', choices=['basic','personality']) + args = parser.parse_args(rospy.myargv()[1:]) rospy.init_node('test', anonymous=True) @@ -809,7 +818,7 @@ def cb(self, msg): logger = logging.getLogger('rosout') logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG]) - ml = MessageListener(wait_for_chat_server=not args.test) + ml = MessageListener(wait_for_chat_server=not args.test, prompt_type=args.prompt_type) if args.test: ret = ml.make_diary() if 'filename' in ret: From bcc7f8c0ecab88fa3bbbca7fb5455f8a6d0a26ae Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 22 Feb 2024 21:28:55 +0900 Subject: [PATCH 29/60] use VQATaskActionResult to get robot status. 
Use last seen date for 'action you always do 'action : time' --- database_talker/scripts/make_aibo_diary.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 38468197c8..4db9826083 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -285,8 +285,10 @@ def make_activities_events(self, diary_activities_raw, message = None): activities_events[event]['last_seen'] = timestamp activities_events[event]['count'] += 1 else: - activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta(), 'count': 0}}) + activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta(seconds=300), 'count': 1}}) # initially assume 5 min interaction, set initial count is 1 # print("{} {:24} {} {}".format(timestamp, event, activities_events[event]['duration'], activities_events[event]['tmp_duration'])) + for event in activities_events: + activities_events[event]['duration'] += activities_events[event]['tmp_duration'] diary_activities_events.append(activities_events) return diary_activities_events @@ -370,6 +372,9 @@ def make_activity(self, diary_activities_raw = None): # diary_activities_events = [activities_events for day1, activities_events for day2, ....] 
#diary_activities_events = self.make_activities_events(diary_activities_raw, 'aibo_driver/') diary_activities_events = self.make_activities_events(diary_activities_raw) + diary_recognition_events = self.make_activities_events(diary_activities_raw, + message='jsk_recognition_msgs/VQATaskActionResult') + diary_activities_events = [{k: v for d in L for k, v in d.items()} for L in zip(diary_activities_events, diary_recognition_events)] for activities_events in diary_activities_events: print("--") @@ -389,9 +394,13 @@ def make_activity(self, diary_activities_raw = None): activities_events_freq = sorted({key: activities_events.count(key) for key in set(activities_events)}.items(), key=lambda x:x[1], reverse=True) always_action = False for event, count in activities_events_freq: - if count/float(len(diary_activities_events)) > 0.5: - prompt += "{} : {:.2f}\n".format(event, count/float(len(diary_activities_events))) + if count/float(len(diary_activities_events)) > 0.25: + if next((x for x in diary_activities_events if event in x.keys()), None): + prompt += "{} : {}\n".format(event, next(x for x in diary_activities_events if event in x.keys())[event]['last_seen']) + else: + prompt += "{} : {:.2f}\n".format(event, count/float(len(diary_activities_events))) always_action = True + if not always_action: prompt += "none\n" From ffe194db34a68883b3f327732088ced970b0291c Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Tue, 27 Feb 2024 16:29:29 +0900 Subject: [PATCH 30/60] check length of translated diary, add missing '' word, show only top 10 event for 'actions happend after a long time' --- database_talker/scripts/make_aibo_diary.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 4db9826083..4587549df0 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -424,7 +424,9 @@ def make_activity(self, 
diary_activities_raw = None): # prompt += "\n 'action : number of days passed since you last did it'\n" long_time_action = False - for event in diary_activities_events[0].keys(): + # if we have more than 10 activities, remove lowest events + for event in [x[0] for x in sorted(diary_activities_events[0].items(), key=lambda x: x[1]['duration'].seconds, reverse=True)[:10]] \ + if len(diary_activities_events[0].keys()) > 10 else diary_activities_events[0].keys(): n = 1 for diary_activities_event in diary_activities_events[1:]: if event not in diary_activities_event.keys() or diary_activities_event[event]['duration'].seconds < 1: @@ -457,7 +459,7 @@ def make_diary(self, language="Japanese"): # create prompt if self.prompt_type == 'personality': # from Ichikura's comment on 2024/Jan/23 - prompt = "\nYou are a pet robot, aibo. Your name is 'wasabi.'\nYou are shy. Your bithday is Dec. 25th, 2018. You are aware to be called your name and run into the voice. You like playing with your pink ball very much. You like being pampered by people. You are so polite to your owner. You like interacting with people. If you are hungry, you can go back to your charge station by yourself. You have 12 aibo friends. \n\nPlease write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary.\n\n" + prompt = "\nYou are a pet robot, aibo. Your name is 'wasabi.'\nYou are shy. Your bithday is Dec. 25th, 2018. You are aware to be called your name and run into the voice. You like playing with your pink ball very much. You like being pampered by people. You are so polite to your owner. You like interacting with people. If you are hungry, you can go back to your charge station by yourself. You have 12 aibo friends. \n\nPlease write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary.\n\n\n" else: prompt = "You are a baby robot. 
You were taken care of by people around you.\n\n" @@ -472,11 +474,15 @@ def make_diary(self, language="Japanese"): rospy.loginfo("prompt = {}".format(prompt)) rospy.loginfo("response = {}".format(response)) - prompt = "Please rewrite the following diary in {language}. Write as childlike as you can. Write a maximum 120 {language} charactors.\n\n".format(language = language) + response + prompt = "Please rewrite the following diary in {language}. Write as childlike as you can. Write around 360 {language} charactors.\n\n".format(language = language) + response # prompt = "Please rewrite the following diary as childlike as you can. Write a maximum 120 {} charactors.\n\n".format(language) + response - response = self.openai_completion(prompt) + response_short = self.openai_completion(prompt) rospy.loginfo("prompt = {}".format(prompt)) - rospy.loginfo("response = {}".format(response)) + rospy.loginfo("response = {}".format(response_short)) + if len(response_short) > 100: + response = response_short + else: + rospy.logerr("response is too short ({} chars), use original version".format(len(response_short))) response = {'text': response} if filename: From 787801841b7ff78aaa2177c74a12531aee9f1dcd Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Sun, 17 Mar 2024 15:15:24 +0900 Subject: [PATCH 31/60] add launch/make_aibo_diary.launch --- database_talker/launch/make_aibo_diary.launch | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 database_talker/launch/make_aibo_diary.launch diff --git a/database_talker/launch/make_aibo_diary.launch b/database_talker/launch/make_aibo_diary.launch new file mode 100644 index 0000000000..b47276e6c7 --- /dev/null +++ b/database_talker/launch/make_aibo_diary.launch @@ -0,0 +1,4 @@ + + + From 6fd042961c15459215bab932211048b6b0b2249e Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Sun, 17 Mar 2024 15:17:28 +0900 Subject: [PATCH 32/60] minor fix : image_activities duplication, to omany activities, fix assistant prompt --- 
database_talker/scripts/make_aibo_diary.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index 4587549df0..f616e6004e 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -310,7 +310,7 @@ def make_image_activities(self, diary_activities_raw = None): timestamp = activities['timestamp'] answer = activities['state'] if len(answer.split()) > 3 and \ - max([jaccard_similarity(x, answer) for x in image_activities.keys()]+[0]) < 0.85: + max([jaccard_similarity(x.lower().split(' '), answer.lower().split(' ')) for x in image_activities.keys()]+[0]) < 0.5: image_activities.update({answer : timestamp}) if (len(image_activities)) > 0: break @@ -393,7 +393,7 @@ def make_activity(self, diary_activities_raw = None): # sort activities event by it's occurence [list] -> sorted({key: count}) activities_events_freq = sorted({key: activities_events.count(key) for key in set(activities_events)}.items(), key=lambda x:x[1], reverse=True) always_action = False - for event, count in activities_events_freq: + for event, count in activities_events_freq[:10]: if count/float(len(diary_activities_events)) > 0.25: if next((x for x in diary_activities_events if event in x.keys()), None): prompt += "{} : {}\n".format(event, next(x for x in diary_activities_events if event in x.keys())[event]['last_seen']) @@ -459,7 +459,7 @@ def make_diary(self, language="Japanese"): # create prompt if self.prompt_type == 'personality': # from Ichikura's comment on 2024/Jan/23 - prompt = "\nYou are a pet robot, aibo. Your name is 'wasabi.'\nYou are shy. Your bithday is Dec. 25th, 2018. You are aware to be called your name and run into the voice. You like playing with your pink ball very much. You like being pampered by people. You are so polite to your owner. You like interacting with people. 
If you are hungry, you can go back to your charge station by yourself. You have 12 aibo friends. \n\nPlease write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary.\n\n\n" + prompt = "\nYou are a pet robot, aibo. Your name is 'wasabi.'\nYou are shy. Your bithday is Dec. 25th, 2018. You are aware to be called your name and run into the voice. You like playing with your pink ball very much. You like being pampered by people. You are so polite to your owner. You like interacting with people. If you are hungry, you can go back to your charge station by yourself. You have 12 aibo friends. \n\nPlease write a brief diary of today from the data. Note, however, that you are a baby robot, so please write today's diary as simply and childishly as possible.\n\n\n" else: prompt = "You are a baby robot. You were taken care of by people around you.\n\n" From df966c44fc61ce3e9d97b5ea083c16f0bb47adae Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 27 Mar 2024 17:52:35 +0900 Subject: [PATCH 33/60] move common function into src/databse_talker/__init__.py --- database_talker/CMakeLists.txt | 3 + database_talker/setup.py | 10 + .../src/database_talker/__init__.py | 896 ++++++++++++++++++ 3 files changed, 909 insertions(+) create mode 100644 database_talker/setup.py create mode 100644 database_talker/src/database_talker/__init__.py diff --git a/database_talker/CMakeLists.txt b/database_talker/CMakeLists.txt index ec1c558746..624c928028 100644 --- a/database_talker/CMakeLists.txt +++ b/database_talker/CMakeLists.txt @@ -3,6 +3,9 @@ project(database_talker) find_package(catkin REQUIRED COMPONENTS catkin_virtualenv) +## This macro ensures modules and global scripts declared therein get installed +catkin_python_setup() + catkin_generate_virtualenv( PYTHON_INTERPRETER python3 CHECK_VENV FALSE diff --git a/database_talker/setup.py b/database_talker/setup.py new file mode 100644 index 0000000000..a5dd770c4a --- /dev/null +++ 
b/database_talker/setup.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +from distutils.core import setup +from catkin_pkg.python_setup import generate_distutils_setup + +d = generate_distutils_setup( + packages=['database_talker'], + package_dir={'': 'src'}, +) + +setup(**d) diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py new file mode 100644 index 0000000000..3806cdd6b9 --- /dev/null +++ b/database_talker/src/database_talker/__init__.py @@ -0,0 +1,896 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import rospy +import logging + +import argparse + +import actionlib +from bson import json_util +# import copy +import cv2 +import datetime +# import difflib +import json +import numpy as np +import os +# import random +import pickle +import re +import random +import rospkg +# import shutil +import sys +# import yaml +import tempfile +# import time +import traceback + +from dateutil import tz +JST = tz.gettz('Asia/Tokyo') + +from cv_bridge import CvBridge +bridge = CvBridge() + +# from googletrans import Translator +# from googletrans.models import Translated +# translator = Translator() + +from mongodb_store.util import deserialise_message + +from google_chat_ros.msg import Card, Section, WidgetMarkup, Image +from google_chat_ros.msg import MessageEvent, SendMessageAction, SendMessageGoal + +from mongodb_store_msgs.msg import StringPairList, StringPair +from mongodb_store_msgs.srv import MongoQueryMsg, MongoQueryMsgRequest, MongoQueryMsgResponse + +# from ros_google_cloud_language.msg import AnalyzeTextAction, AnalyzeTextGoal + +# from dialogflow_task_executive.msg import DialogTextAction, DialogTextGoal, DialogTextActionResult + +# from jsk_recognition_msgs.msg import ClassificationTaskAction, ClassificationTaskGoal +# from jsk_recognition_msgs.msg import VQATaskAction, VQATaskGoal + +from openai_ros.srv import Completion, CompletionResponse + +# 
https://stackoverflow.com/questions/196345/how-to-check-if-a-string-in-python-is-in-ascii +def is_ascii(s): + return all(ord(c) < 128 for c in s) + +# https://www.newscatcherapi.com/blog/ultimate-guide-to-text-similarity-with-python +def jaccard_similarity(x,y): + """ returns the jaccard similarity between two lists """ + intersection_cardinality = len(set.intersection(*[set(x), set(y)])) + union_cardinality = len(set.union(*[set(x), set(y)])) + return intersection_cardinality/float(union_cardinality) + +class DatabaseTalkerBase(object): + + def __init__(self, start_date=datetime.date.today(), wait_for_chat_server=True, use_activities_cache=True, prompt_type='basic'): + #self.pickle_file = tempfile.NamedTemporaryFile(suffix='.pickle') + self.start_date = start_date + self.prompt_type = prompt_type + self.personality = '' + self.pickle_file = "/tmp/activities.pickle" + self.use_activities_cache = use_activities_cache + self.robot_type = rospy.get_param('robot/type') + self.robot_name = rospy.get_param('robot/name') + rospy.loginfo("using '{}' database".format(self.robot_name)) + + self.query_types = ['jsk_recognition_msgs/VQATaskActionResult'] + + rospy.loginfo("wait for '/google_chat_ros/send'") + self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction) + if wait_for_chat_server: + self.chat_ros_ac.wait_for_server() + + rospy.loginfo("wait for '/message_store/query_messages'") + rospy.wait_for_service('/message_store/query_messages') + self.query = rospy.ServiceProxy('/message_store/query_messages', MongoQueryMsg) + + # rospy.loginfo("wait for '/classification/inference_server'") + # self.classification_ac = actionlib.SimpleActionClient('/classification/inference_server' , ClassificationTaskAction) + # self.classification_ac.wait_for_server() + + # rospy.loginfo("wait for '/vqa/inference_server'") + # self.vqa_ac = actionlib.SimpleActionClient('/vqa/inference_server' , VQATaskAction) + # self.vqa_ac.wait_for_server() + + # # 
https://github.com/k-okada/openai_ros + # # this requres apt install python3.7 python3.7-venv + rospy.loginfo("wait for '/openai/get_response'") + rospy.wait_for_service('/openai/get_response') + self.completion = rospy.ServiceProxy('/openai/get_response', Completion) + + # ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py + # rospy.loginfo("wait for '/dialogflow_client/text_action'") + # self.dialogflow_ac = actionlib.SimpleActionClient('/dialogflow_client/text_action' , DialogTextAction) + # self.dialogflow_ac.wait_for_server() + + # rospy.loginfo("wait for '/analyze_text/text'") + # self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text' , AnalyzeTextAction) + # self.analyze_text_ac.wait_for_server() + + rospy.loginfo("subscribe '/google_chat_ros/message_activity'") + self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) + + rospy.loginfo("all done, ready") + + def query_multiple_types(self, types, meta_tuple): + "Query mongo messages, returns list of MongoQueryMsgResponse" + msgs = MongoQueryMsgResponse() + for _type in types: + msg = self.query(database = 'jsk_robot_lifelog', + collection = self.robot_name, + type = _type, + single = False, + # limit = limit, + meta_query = StringPairList(meta_tuple), + sort_query = StringPairList([StringPair('_meta.published_at', '-1')])) + msgs.messages.extend(msg.messages) + msgs.metas.extend(msg.metas) + return msgs + + def query_mongo_data(self, types, start_time, end_time): + "Query activities for aibo robot, returns list of tuple (msg, meta)" + rospy.logwarn("Query activities from {} until {}".format(start_time, end_time)) + meta_query= {'published_at': {"$lt": end_time, "$gt": start_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) + mongo_msgs = self.query_multiple_types(types, meta_tuple) + + activities = [] + for msg, meta in 
zip(mongo_msgs.messages, mongo_msgs.metas): + msg = deserialise_message(msg) + meta = json.loads(meta.pairs[0].second) + activities.append((msg, meta)) + rospy.logwarn(" Found {} messages".format(len(activities))) + return activities + + def query_mongo_data_days(self, types=None, days=7): + "Query activities for a week, returns list of list of tuple (msg, meta), if activity is empty of that day, returns empty list" + if types == None: + types = self.query_types + # if we found cache file + if self.use_activities_cache and (os.path.exists(self.pickle_file) and + (datetime.datetime.today() - datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file))).seconds < 1 * 60 * 60): # seconds -> hours + rospy.loginfo('Loading cached activities data {}'.format(datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file)))) + with open(self.pickle_file, 'rb') as f: + return pickle.load(f) + + activities = [] + today = self.start_date ## for debug ... -> - datetime.timedelta(hours=24) + startdate = datetime.datetime(today.year, today.month, today.day, tzinfo=JST) + for days_before in range(days): + activities_raw = self.query_mongo_data(types, + startdate-datetime.timedelta(hours=days_before*24), + startdate-datetime.timedelta(hours=(days_before-1)*24)) + activities.append(activities_raw) + + # dump msgs + if self.use_activities_cache: + with open(self.pickle_file, 'wb') as f: + pickle.dump(activities, f) + f.flush() + + return activities + + def make_state_frequency(self, diary_activities_raw): + message = list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' not in x, self.query_types)) + diary_activities_freq = [] + for activities_raw in diary_activities_raw: + activities_raw_state = [x['state'] for x in [x for x in activities_raw if x['type'] in message]] + activities_freq = {key: activities_raw_state.count(key) for key in set(activities_raw_state)} + rospy.logwarn("Found {} activity data (make_state_frequency)".format(len(activities_raw))) + if 
len(activities_raw) > 0: + rospy.logwarn(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp'])) + rospy.logwarn(" freq : {} ({})".format(activities_freq, len(activities_freq))) + diary_activities_freq.append(activities_freq) + return diary_activities_freq + + def make_activities_events(self, diary_activities_raw, message = None): + if not message: + message = list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' not in x, self.query_types)) + diary_activities_events = [] + for activities_raw in diary_activities_raw: + activities_events = {} + for activities in activities_raw: + timestamp = activities['timestamp'] + event = activities['state'] + if activities['type'] not in message: + continue + + if event in activities_events: + time_since_last_seen = activities_events[event]['last_seen'] - timestamp + if time_since_last_seen.seconds/60 < 30: # min + activities_events[event]['tmp_duration'] += time_since_last_seen + else: + # 'duration' keeps maximum duration + # if activities_events[event]['tmp_duration'] > activities_events[event]['duration']: + # activities_events[event]['duration'] = activities_events[event]['tmp_duration'] + # 'duration' keeps accumulated duration + activities_events[event]['duration'] += activities_events[event]['tmp_duration'] + activities_events[event]['tmp_duration'] = datetime.timedelta() + activities_events[event]['last_seen'] = timestamp + activities_events[event]['count'] += 1 + else: + activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta(seconds=300), 'count': 1}}) # initially assume 5 min interaction, set initial count is 1 + # print("{} {:24} {} {}".format(timestamp, event, activities_events[event]['duration'], activities_events[event]['tmp_duration'])) + for event in activities_events: + activities_events[event]['duration'] += activities_events[event]['tmp_duration'] + 
diary_activities_events.append(activities_events) + return diary_activities_events + + def make_image_activities(self, diary_activities_raw = None): + if not diary_activities_raw: + mongo_data_days = self.query_mongo_data_days() + diary_activities_raw = self.make_robot_activities_raw(mongo_data_days) + + # create activities event data + # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} + # diary_activities_events = [activities_events for day1, activities_events for day2, ....] + diary_activities_events = self.make_activities_events(diary_activities_raw, 'jsk_recognition_msgs/VQATaskActionResult') + + image_activities = {} + for activities_raw in diary_activities_raw: + for activities in activities_raw: + if activities['type'] != 'jsk_recognition_msgs/VQATaskActionResult': + continue + timestamp = activities['timestamp'] + answer = activities['state'] + if len(answer.split()) > 3 and \ + max([jaccard_similarity(x.lower().split(' '), answer.lower().split(' ')) for x in image_activities.keys()]+[0]) < 0.5: + image_activities.update({answer : timestamp}) + if (len(image_activities)) > 0: + break + else: + rospy.logwarn(" no valid image description is found...") + # + if len(image_activities) == 0: + return {} + + prompt = "Please select the most memorable and illuminating event by number from the list below.\n\n" + n = 0 + for answer, timestamp in image_activities.items(): + prompt += "{}: {} ({})\n".format(n, answer, timestamp) + n += 1 + + # Avoid error 'This model's maximum context length is 4097 tokens, however you requested 5464 tokens (4952 in your prompt; 512 for the completion). 
Please reduce your prompt' + no = len(image_activities) + if len(prompt) + 512 < 4097: + response = self.openai_completion(prompt) + n = re.search(r'(\d+)', response) + if n: + no = int(n.group(1)) + else: + rospy.logerr("too long prompt...") + + if no >= len(image_activities): + rospy.loginfo("no is {}, so use random....".format(no)) + no = random.randrange(len(image_activities)) + + answer, timestamp = list(image_activities.items())[no] + rospy.loginfo("topic of the day") + rospy.loginfo(" answer : {}".format(answer)) + rospy.loginfo(" timestamp : {}".format(timestamp)) + results = self.query_images_and_classify(query = answer, + start_time = timestamp - datetime.timedelta(minutes=5), + end_time = timestamp + datetime.timedelta(minutes=5), + classify = False) + if True: + cv2.imshow('images of today', cv2.hconcat([cv2.imdecode(np.fromstring(result['image'].data, np.uint8), cv2.IMREAD_COLOR) for result in results])) + cv2.waitKey(100) + + + if len(results) > 0: + # pubish as card + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + self.write_image_with_annotation(filename, results[0], answer) + return {'text': answer, 'filename': filename} + + def make_activity(self, diary_activities_raw = None): + "Returns activity prompts" + # create diary activities_raw + # list of (timestamp, event) [[{'temestamp': , 'state':, 'type': }, {'temestamp': , 'state':, 'type': } ...],[#for 2nd day],[#for 3rd day]...] + if not diary_activities_raw: + mongo_data_days = self.query_mongo_data_days() + diary_activities_raw = self.make_robot_activities_raw(mongo_data_days) + + # make frequencey data for 7days + # activities_freq {'event_1' : count, 'event_2' : count} + # diary_activities_freq = [activities_freq for day1, activities_freq for day2, ...] 
+ diary_activities_freq = self.make_state_frequency(diary_activities_raw) + + # create activities event data + # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} + # diary_activities_events = [activities_events for day1, activities_events for day2, ....] + #diary_activities_events = self.make_activities_events(diary_activities_raw, 'aibo_driver/') + diary_activities_events = self.make_activities_events(diary_activities_raw) + diary_recognition_events = self.make_activities_events(diary_activities_raw, + message='jsk_recognition_msgs/VQATaskActionResult') + diary_activities_events = [{k: v for d in L for k, v in d.items()} for L in zip(diary_activities_events, diary_recognition_events)] + + for activities_events in diary_activities_events: + print("--") + for event, duration in sorted(activities_events.items(), key=lambda x: x[1]['duration'], reverse=True): + print("{:24} : {:4.2f} min ({} times)".format(event, duration['duration'].seconds/60, duration['count'])) + + # flatten list + activities_events = [x for events in diary_activities_events for x in events.keys()] # get all activities with duplicates + + if len(activities_events) == 0: + return "" + # percentages of activities happend + prompt = "{}\n\n".format(list(list(filter(None, diary_activities_events))[0].items())[0][1]['last_seen'].strftime("%a %d %b %Y")) + prompt += "\n 'action : time'\n" + + # sort activities event by it's occurence [list] -> sorted({key: count}) + activities_events_freq = sorted({key: activities_events.count(key) for key in set(activities_events)}.items(), key=lambda x:x[1], reverse=True) + always_action = False + for event, count in activities_events_freq[:10]: + if count/float(len(diary_activities_events)) > 0.25: + if next((x for x in diary_activities_events if event in x.keys()), None): + prompt += "{} : {}\n".format(event, next(x for x in diary_activities_events if event in x.keys())[event]['last_seen']) + else: + prompt += "{} : 
{:.2f}\n".format(event, count/float(len(diary_activities_events))) + always_action = True + + if not always_action: + prompt += "none\n" + + # estimate frequence in 24h + prompt += "\n 'action : increase from the number of time done yesterday'\n" + + more_yesterday_action = False + diary_activities_events_no_empty = list(filter(None, diary_activities_events)) + if len(diary_activities_events_no_empty) >= 2: + l0 = diary_activities_events_no_empty[0] + l1 = diary_activities_events_no_empty[1] + for event in set(activities_events): + if event in l0 and event in l1: + increase = l0[event]['count'] - l1[event]['count'] + if increase > 0: + prompt += "{} : +{}\n".format(event, increase) + more_yesterday_action = True + if not more_yesterday_action: + prompt += "none\n" + + # + prompt += "\n 'action : number of days passed since you last did it'\n" + long_time_action = False + # if we have more than 10 activities, remove lowest events + for event in [x[0] for x in sorted(diary_activities_events[0].items(), key=lambda x: x[1]['duration'].seconds, reverse=True)[:10]] \ + if len(diary_activities_events[0].keys()) > 10 else diary_activities_events[0].keys(): + n = 1 + for diary_activities_event in diary_activities_events[1:]: + if event not in diary_activities_event.keys() or diary_activities_event[event]['duration'].seconds < 1: + n += 1 + else: + break + if n >= 2: + prompt += "{} : {}\n".format(event, n) + long_time_action = True + if not long_time_action: + prompt += "none\n" + + rospy.logdebug(prompt) + return prompt + + def make_robot_activities_raw(self, mongo_data_days = None): + "Create robot activities for several days, returns list of list of tuple(temestamp, event)" + # list of list of tuples (msg, meta) [[(msg, meta), (msg, meta),...],[#for 2nd day], [#for 3rd day]] + if not mongo_data_days: + mongo_data_days = self.query_mongo_data_days() + diary_activities_raw = [] ## (timestamp, event) + for mongo_data in mongo_data_days: + rospy.loginfo("Found {} mongo data 
(make_robot_activities_raw)".format(len(mongo_data))) + rospy.loginfo(" types : {}".format(list(set([x[1]['stored_type'] for x in mongo_data])))) + activities_raw = [] + input_topics = [] + for msg, meta in mongo_data: + state = [] + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + input_topics.append(meta['input_topic']) + if meta['stored_type'] == 'jsk_recognition_msgs/VQATaskActionResult': + #rospy.logwarn("{} {}".format(timestamp, msg.result.result.result)) + if len(msg.result.result.result) > 0: + answer = msg.result.result.result[0].answer + + if any([term in answer for term in ['Silhouetted intrigue', '#Robot', 'glimpse', 'cable', 'Focus']]): + rospy.logwarn("skip JUST FOR DEMO : {}".format(answer)) + continue + + if len(answer.split()) > 3: + rospy.logwarn("{} {}".format(timestamp, answer)) + state = [answer] + else: + rospy.logwarn("Unknown stored type: {}".format(meta['stored_type'])) + continue + # create activities_raw + for s in state: + activities_raw.append({'timestamp': timestamp, 'state': s, 'type': meta['stored_type']}) + + diary_activities_raw.append(activities_raw) + + if len(activities_raw) > 0: + rospy.loginfo(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp'])) + rospy.loginfo(" topics : {}".format({key: input_topics.count(key) for key in set(input_topics)})) + ## + return diary_activities_raw ## (timestamp, event) + + def make_diary(self, language="Japanese"): + "make dirary" + # get mongo data for 7 days + mongo_data_days = self.query_mongo_data_days() + diary_activities_raw = self.make_robot_activities_raw(mongo_data_days) + # get most impressive image and text + topic_of_day = None + filename = False + + image_activity = self.make_image_activities(diary_activities_raw) + if image_activity: + topic_of_day = image_activity['text'] + filename = image_activity['filename'] + + # create prompt + if self.prompt_type == 'personality': + # from Ichikura's comment on 2024/Jan/23 
+ prompt = "\nYou are a pet robot, {robot_type}. Your name is '{robot_name}.'\n{personality}\nPlease write a brief diary of today from the data. Note, however, that you are a baby robot, so please write today's diary as simply and childishly as possible.\n\n\n".format(robot_name=self.robot_name, robot_type=self.robot_type, personality=self.personality) + else: + prompt = "You are a baby robot. You were taken care of by people around you.\n\n" + + if topic_of_day: + prompt += "Today, you are impressed by " + topic_of_day + "." + prompt += "The following data is a record of today's actions regarding what we always do, what we did more than yesterday, and what happened after a long time. Please write a brief diary from the data. Note, however, that you are a baby robot, so please make it a child-like diary. Add your name and date in a diary\n\n" + + prompt += self.make_activity(diary_activities_raw) + + response = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response)) + + prompt = "Please rewrite the following diary in {language}. Write as childlike as you can. Write around 360 {language} charactors.\n\n".format(language = language) + response + # prompt = "Please rewrite the following diary as childlike as you can. 
Write a maximum 120 {} charactors.\n\n".format(language) + response + response_short = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response_short)) + if len(response_short) > 64: + response = response_short + else: + rospy.logerr("response is too short ({} chars), use original version".format(len(response_short))) + + response = {'text': response} + if filename: + response.update({'filename': filename}) + + return response + + def make_image_activities_raw(self, diary_activities_raw = None): + ''' + returns {answer2: timestamp1, answer2: timestamp2, ...} + ''' + if not diary_activities_raw: + mongo_data_days = self.query_mongo_data_days() + diary_activities_raw = self.make_robot_activities_raw(mongo_data_days) + + # create activities event data + # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} + # diary_activities_events = [activities_events for day1, activities_events for day2, ....] + diary_activities_events = self.make_activities_events(diary_activities_raw, 'jsk_recognition_msgs/VQATaskActionResult') + + image_activities_raw = {} + for activities_raw in diary_activities_raw: + for activities in activities_raw: + if activities['type'] != 'jsk_recognition_msgs/VQATaskActionResult': + continue + timestamp = activities['timestamp'] + answer = activities['state'] + image_activities_raw.update({answer: timestamp}) + return image_activities_raw + + def make_response(self, text, language="Japanese"): + # translate to english + if language=="Japanese": + text = self.openai_completion('Translate the following sentences to English\n\n{}'.format(text)) + # chosse relative images + image_activities = self.make_image_activities_raw() + prompt = "From the list below, please select the one that best relates to the sentense '{}'. 
Please use number in your answer\n\n".format(text) + + n = 0 + for answer, timestamp in image_activities.items(): + prompt += "{}: {} ({})\n".format(n, answer, timestamp) + n += 1 + rospy.logerr(prompt) + + # ask question + response = self.openai_completion(prompt) + n = re.search(r'(\d+)', response) + answer, timestamp = None, None + if n: + no = int(n.group(1)) + if no >= 0 and no < len(image_activities): + image_activity = list(image_activities.items())[no] + answer, timestamp = image_activity + rospy.loginfo("Choose {} : {} as corresponging memory".format(no, answer)) + + + # create response + prompt = "\nYou are a baby robot. You were taken care of by people around you.\n\n\n" + + prompt += "If your friend tells you '{text}', ".format(text=text) + if answer: + prompt += "and you remembered that you feel '{answer}' at that moment. ".format(answer=answer) + prompt += "What would you reply? Show only the reply.\n\n" + ##prompt += self.make_activity() + + response = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response)) + + prompt = "Please rewrite the following response in {language}. Write as childlike as you can. 
Write around 140 {language} charactors.\n\n".format(language = language) + response + + response = self.openai_completion(prompt) + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("response = {}".format(response)) + + if timestamp is None or '元気' in text: + rospy.logerr("Use latest images") + start_time = datetime.datetime.now(JST) + datetime.timedelta(minutes=-60) + end_time = datetime.datetime.now(JST) + else: + start_time = timestamp - datetime.timedelta(minutes=5) + end_time = timestamp + datetime.timedelta(minutes=5) + + results = self.query_images_and_classify(query = "....", + start_time = start_time, + end_time = end_time, + classify = False) + if len(results) > 0: + if True: # debug + try: + cv2.imshow('images for response', cv2.hconcat([cv2.imdecode(np.fromstring(result['image'].data, np.uint8), cv2.IMREAD_COLOR) for result in results])) + cv2.waitKey(100) + except: + pass + # pubish as card + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + self.write_image_with_annotation(filename, results[0], "") + return {'text': response, 'filename': filename} + + return {'text': response} + + def make_reply(self, message, lang="en", startdate=datetime.datetime.now(JST)-datetime.timedelta(hours=24), duration=datetime.timedelta(hours=24) ): + enddate = startdate+duration + rospy.logwarn("Run make_reply({} from {} to {})".format(message, startdate, enddate)) + query = self.text_to_salience(message) + rospy.logwarn("query using salience word '{}'".format(query)) + # look for images + try: + # get chat message + results, chat_msgs = self.query_dialogflow(query, startdate, enddate, threshold=0.25) + # retry = 0 + # while retry < 3 and len(results) == 0 and len(chat_msgs.metas) > 0: + # meta = json.loads(chat_msgs.metas[-1].pairs[0].second) + # results, chat_msgs = self.query_dialogflow(query, datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)) + # retry = retry + 1 + # sort based on similarity with 'query' + 
chat_msgs_sorted = sorted(results, key=lambda x: x['similarity'], reverse=True) + + if len(chat_msgs_sorted) == 0: + rospy.logwarn("no chat message was found") + else: + # query images that was taken when chat_msgs are stored + msg = chat_msgs_sorted[0]['msg'] + meta = chat_msgs_sorted[0]['meta'] + text = chat_msgs_sorted[0]['message'] + startdate = chat_msgs_sorted[0]['timestamp'] + action = chat_msgs_sorted[0]['action'] + similarity = chat_msgs_sorted[0]['similarity'] + # query chat to get response + #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second) + # text = msg.message.argument_text or msg.message.text + # startdate = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + rospy.loginfo("Found message '{}'({}) at {}, corresponds to query '{}' with {:2f}%".format(text, action, startdate.strftime('%Y-%m-%d %H:%M:%S'), query, similarity)) + + # query images when chat was received + start_time = startdate # startdate is updated with found chat space + end_time = enddate # enddate is not modified within this function, it is given from chat + results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time) + + # no images found + if len(results) == 0: + return {'text': '記憶がありません🤯'} + + end_time = results[-1]['timestamp'] + + # sort + results = sorted(results, key=lambda x: x['similarities'], reverse=True) + rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), x['similarities']), results)))) + best_result = results[0] + + ''' + # if probability is too low, try again + while len(results) > 0 and results[0]['similarities'] < 0.25: + + start_time = end_time-datetime.timedelta(hours=24) + timestamp = datetime.datetime.now(JST) + results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time, limit=300) + if len(results) > 0: + end_time = results[-1]['timestamp'] + # sort + results = sorted(results, key=lambda x: 
x['similarities'], reverse=True) + #rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results)))) + if len(results) > 0 and results[0]['similarities'] > best_result['similarities']: + best_result = results[0] + + rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'))) + ''' + + ## make prompt + reaction = self.describe_image_scene(best_result['image']) + if len(chat_msgs_sorted) > 0 and chat_msgs_sorted[0]['action'] and 'action' in chat_msgs_sorted[0]: + reaction += " and you felt " + chat_msgs_sorted[0]['action'] + rospy.loginfo("reaction = {}".format(reaction)) + + # make prompt + prompt = 'if you are a pet and someone tells you \"' + message + '\" when we went together, ' + \ + 'and ' + reaction + ' in your memory of that moment, what would you reply? '+ \ + 'Show only the reply in {lang}'.format(lang={'en': 'English', 'ja':'Japanese'}[lang]) + loop = 0 + result = None + while loop < 3 and result is None: + try: + result = self.completion(prompt=prompt,temperature=0) + except rospy.ServiceException as e: + rospy.logerr("Service call failed: %s"%e) + result = None + loop += 1 + result.text = result.text.lstrip().encode('utf8') + rospy.loginfo("prompt = {}".format(prompt)) + rospy.loginfo("result = {}".format(result)) + # pubish as card + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + self.write_image_with_annotation(filename, best_result, prompt) + return {'text': result.text, 'filename': filename} + + except Exception as e: + raise ValueError("Query failed {} {}".format(e, traceback.format_exc())) + + + def openai_completion(self, prompt, temperature=0): + loop = 0 + result = None + while loop < 5 and result is None: + try: + result = self.completion(prompt=prompt,temperature=temperature) + if result.text == '': + rospy.logwarn(result) + rospy.logwarn("result 
text is too short, retry completion") + rospy.sleep(2) + result = None + except rospy.ServiceException as e: + rospy.logerr("Service call failed: %s"%e) + rospy.sleep(2) + result = None + loop += 1 + if result is None: + raise Exception('[ERROR] openni_completion failed to complete {}'.format(prompt)) + result.text = result.text.lstrip() + rospy.logdebug("prompt = {}".format(prompt)) + rospy.logdebug("result = {}".format(result)) + return result.text + + def write_image_with_annotation(self, filename, best_result, prompt): + image = bridge.compressed_imgmsg_to_cv2(best_result['image']) + _, width, _ = image.shape + scale = width/640.0 + if 'label' in best_result and 'similarities' in best_result: + cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), + (10,int(20*scale)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (255,255,255), 8, 1) + cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), + (10,int(20*scale)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (0,0,0), 2, 1) + string_width = 70 + for i in range(0, len(prompt), string_width): # https://stackoverflow.com/questions/13673060/split-string-into-strings-by-length + text = prompt[i:i+string_width] + cv2.putText(image, text, (10,int(43*scale)+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (255,255,255), 4, 1) + cv2.putText(image, text, (10,int(43*scale)+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (0,0,0), 1, 1) + cv2.imwrite(filename, image) + rospy.logwarn("save images to {}".format(filename)) + + + def query_dialogflow(self, query, start_time, end_time, limit=30, threshold=0.0): + rospy.logwarn("Query dialogflow from {} until {}".format(start_time, end_time)) + meta_query= {'published_at': {"$lt": end_time, "$gt": start_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, 
json.dumps(meta_query, default=json_util.default)),) + chat_msgs = self.query(database = 'jsk_robot_lifelog', + collection = self.robot_name, + # type = 'google_chat_ros/MessageEvent', + type = 'dialogflow_task_executive/DialogTextActionResult', + single = False, + # limit = limit, + meta_query = StringPairList(meta_tuple), + sort_query = StringPairList([StringPair('_meta.published_at', '-1')])) + + # optimization... send translate once + messages = '' + for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): + msg = deserialise_message(msg) + message = msg.result.response.query.replace('\n','') + messages += message + '\n' + messages = self.translate(messages, dest="en").text.split('\n') + + # show chats + results = [] + for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): + msg = deserialise_message(msg) + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + # message = msg.message.argument_text or msg.message.text + message = msg.result.response.query + #message_translate = self.translate(message, dest="en").text + message_translate = messages.pop(0).strip() + result = {'message': message, + 'message_translate': message_translate, + 'timestamp': timestamp, + 'similarity': difflib.SequenceMatcher(None, query, message_translate).ratio(), + 'action': msg.result.response.action, + 'msg': msg, + 'meta': meta} + if msg.result.response.action in ['make_reply', 'input.unknown']: + rospy.logwarn("Found dialogflow messages {}({}) at {} but skipping (action:{})".format(result['message'], result['message_translate'], result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), msg.result.response.action)) + else: + rospy.loginfo("Found dialogflow messages {}({}) ({}) at {} ({}:{:.2f})".format(result['message'], result['message_translate'], msg.result.response.action, result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, result['similarity'])) + if ( result['similarity'] > threshold): + 
results.append(result) + else: + rospy.logwarn(" ... skipping (threshold: {:.2f})".format(threshold)) + + + return results, chat_msgs + + + def query_images_and_classify(self, query, start_time, end_time, limit=10, classify=True): + rospy.logwarn("Query images from {} to {}".format(start_time, end_time)) + meta_query= {#'input_topic': '/spot/camera/hand_color/image/compressed/throttled', + 'published_at': {"$gt": start_time, "$lt": end_time}} + meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) + msgs = self.query(database = 'jsk_robot_lifelog', + collection = self.robot_name, + type = 'sensor_msgs/CompressedImage', + single = False, + limit = limit, + meta_query = StringPairList(meta_tuple), + sort_query = StringPairList([StringPair('_meta.published_at', '-1')])) + + rospy.loginfo("Found {} images".format(len(msgs.messages))) + if len(msgs.messages) == 0: + rospy.logwarn("no images was found") + + # get contents of images + results = [] + for msg, meta in zip(msgs.messages, msgs.metas): + meta = json.loads(meta.pairs[0].second) + timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) + rospy.logwarn(" Found images at {}".format(timestamp)) + + result = {'query' : query, 'image' : deserialise_message(msg), 'timestamp': timestamp} + if classify: + goal = ClassificationTaskGoal() + goal.compressed_image = result['image'] + goal.queries = [query] + self.classification_ac.send_goal(goal) + self.classification_ac.wait_for_result() + result = self.classification_ac.get_result() + idx = result.result.label_names.index(query) + #similarities = result.result.probabilities + similarities = result.result.label_proba + # rospy.logwarn(" ... {}".format(list(zip(result.result.label_names, map(lambda x: "{:.2f}".format(x), similarities))))) + rospy.logwarn("Found images at {} .. 
{}".format(timestamp, list(zip(result.result.label_names, map(lambda x: "{:.4f}".format(x), similarities))))) + result.update({'label': result.result.label_names[idx], 'probabilities': result.result.probabilities[idx], 'similarities': result.result.label_proba[idx]}) + results.append(result) + + # we do not sorty by probabilites, becasue we also need oldest timestamp + return results + + def describe_image_scene(self, image): + goal = VQATaskGoal() + goal.compressed_image = image + + # unusual objects + if random.randint(0,1) == 1: + goal.questions = ['what unusual things can be seen?'] + reaction = 'you saw ' + else: + goal.questions = ['what is the atmosphere of this place?'] + reaction = 'the atmosphere of the scene was ' + + # get vqa result + self.vqa_ac.send_goal(goal) + self.vqa_ac.wait_for_result() + result = self.vqa_ac.get_result() + reaction += result.result.result[0].answer + return reaction + + def publish_google_chat_card(self, text, space, filename=None): + goal = SendMessageGoal() + goal.text = text + if filename: + goal.cards = [Card(sections=[Section(widgets=[WidgetMarkup(image=Image(localpath=filename))])])] + goal.space = space + rospy.logwarn("send {} to {}".format(goal.text, goal.space)) + self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10)) + + def text_to_salience(self, text): + goal = AnalyzeTextGoal() + goal.text = text; + self.analyze_text_ac.send_goal(goal) + self.analyze_text_ac.wait_for_result() + entity = self.analyze_text_ac.get_result() + if len(entity.entities) > 0: + return entity.entities[0].name + else: + return text + + def translate(self, text, dest): + global translator + loop = 3 + while loop > 0: + try: + ret = translator.translate(text, dest="en") + return ret + except Exception as e: + rospy.logwarn("Faile to translate {}".format(e)) + time.sleep(1) + translator = Translator() + loop = loop - 1 + return Translated(text=text, dest=dest) + + + def cb(self, msg): + space = 'spaces/AAAAoTwLBL0' ## 
default space JskRobotBot + if msg._type == 'google_chat_ros/MessageEvent': + text = msg.message.argument_text.lstrip() or msg.message.text.lstrip() + space = msg.space.name + rospy.logwarn("Received chat message '{}'".format(text)) + else: + rospy.logerr("Unknown message type {}".format(msg._type)) + return + + try: + language = 'English' if is_ascii(text) else 'Japanese' + if any(x in text for x in ['diary', '日記']): + self.publish_google_chat_card("Sure!", space) + # check if text contains 'date' + try: + if not language is 'English': + date_text = self.openai_completion('Translate the following sentences to English\n\n{}'.format(text)) + + date_string = self.openai_completion('If "{}" contains date information, please return with "%Y-%m-%d" format. Note today is {}'.format(text if language is 'English' else self.openai_completion('Translate the following sentences to English\n\n{}'.format(text)), self.start_date.strftime('%Y-%m-%d %H:%M:%S'))) + self.start_date = datetime.datetime.strptime(re.search(r'\d\d\d\d-\d\d-\d\d', date_string)[0], '%Y-%m-%d') + # remove cache #### FIXME + self.use_activities_cache = False + except Exception as e: + rospy.logwarn("No date information included {}".format(e)) + + ret = self.make_diary(language) + if 'filename' in ret: + # upload text first, then upload images + self.publish_google_chat_card(ret['text'], space) + self.publish_google_chat_card('', space, ret['filename']) + else: + self.publish_google_chat_card(ret['text'], space) + else: + ret = self.make_response(text, language) + if msg.message.sender.name: + response = "<{}>\n".format(msg.message.sender.name) + ret['text'] + else: + response = ret['text'] + self.publish_google_chat_card(response, space) + if 'filename' in ret: + self.publish_google_chat_card('', space, ret['filename']) + + + except Exception as e: + rospy.logerr("Callback failed {} {}".format(e, traceback.format_exc())) + self.publish_google_chat_card("💀 {}".format(e), space) + From 
9162f89da279533c5acf3ff200b9429d42aa075c Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 27 Mar 2024 17:53:15 +0900 Subject: [PATCH 34/60] add make_aibo_diary, make_lovot_diary, make_diary --- database_talker/scripts/make_aibo_diary.py | 700 +------------------- database_talker/scripts/make_diary.py | 44 ++ database_talker/scripts/make_lovot_diary.py | 109 +++ 3 files changed, 162 insertions(+), 691 deletions(-) create mode 100755 database_talker/scripts/make_diary.py create mode 100755 database_talker/scripts/make_lovot_diary.py diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index f616e6004e..c7164511c0 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -2,182 +2,24 @@ # -*- coding: utf-8 -*- import rospy -import logging - import argparse - -import actionlib -from bson import json_util -# import copy -import cv2 -import datetime -# import difflib -import json -import os -# import random -import pickle -import re -import random -import rospkg -# import shutil +import logging import sys -# import yaml -import tempfile -# import time -import traceback - -from dateutil import tz -JST = tz.gettz('Asia/Tokyo') - -from cv_bridge import CvBridge -bridge = CvBridge() - -# from googletrans import Translator -# from googletrans.models import Translated -# translator = Translator() - -from mongodb_store.util import deserialise_message - -from google_chat_ros.msg import Card, Section, WidgetMarkup, Image -from google_chat_ros.msg import MessageEvent, SendMessageAction, SendMessageGoal - -from mongodb_store_msgs.msg import StringPairList, StringPair -from mongodb_store_msgs.srv import MongoQueryMsg, MongoQueryMsgRequest, MongoQueryMsgResponse - -# from ros_google_cloud_language.msg import AnalyzeTextAction, AnalyzeTextGoal - -# from dialogflow_task_executive.msg import DialogTextAction, DialogTextGoal, DialogTextActionResult - -# from jsk_recognition_msgs.msg 
import ClassificationTaskAction, ClassificationTaskGoal -# from jsk_recognition_msgs.msg import VQATaskAction, VQATaskGoal - -from openai_ros.srv import Completion, CompletionResponse -# https://stackoverflow.com/questions/196345/how-to-check-if-a-string-in-python-is-in-ascii -def is_ascii(s): - return all(ord(c) < 128 for c in s) +from database_talker import DatabaseTalkerBase -# https://www.newscatcherapi.com/blog/ultimate-guide-to-text-similarity-with-python -def jaccard_similarity(x,y): - """ returns the jaccard similarity between two lists """ - intersection_cardinality = len(set.intersection(*[set(x), set(y)])) - union_cardinality = len(set.union(*[set(x), set(y)])) - return intersection_cardinality/float(union_cardinality) +class MessageListener(DatabaseTalkerBase): -class MessageListener(object): + def __init__(self, *args, **kwargs): - def __init__(self, wait_for_chat_server=True, prompt_type='basic'): - #self.pickle_file = tempfile.NamedTemporaryFile(suffix='.pickle') - self.prompt_type = prompt_type - self.pickle_file = "/tmp/activities.pickle" - self.robot_type = rospy.get_param('robot/type') - self.robot_name = rospy.get_param('robot/name') - rospy.loginfo("using '{}' database".format(self.robot_name)) - - if self.robot_type == 'aibo': - self.query_types = ['aibo_driver/StringStatus', - 'aibo_driver/ObjectStatusArray', - 'jsk_recognition_msgs/VQATaskActionResult'] - elif self.robot_type == 'spot': - self.query_types = ['spot_msgs/Feedback', - 'spot_msgs/ManipulatorState', - 'jsk_recognition_msgs/VQATaskActionResult'] - else: - self.query_types = ['jsk_recognition_msgs/VQATaskActionResult'] - - rospy.loginfo("wait for '/google_chat_ros/send'") - self.chat_ros_ac = actionlib.SimpleActionClient('/google_chat_ros/send', SendMessageAction) - if wait_for_chat_server: - self.chat_ros_ac.wait_for_server() - - rospy.loginfo("wait for '/message_store/query_messages'") - rospy.wait_for_service('/message_store/query_messages') - self.query = 
rospy.ServiceProxy('/message_store/query_messages', MongoQueryMsg) - - # rospy.loginfo("wait for '/classification/inference_server'") - # self.classification_ac = actionlib.SimpleActionClient('/classification/inference_server' , ClassificationTaskAction) - # self.classification_ac.wait_for_server() - - # rospy.loginfo("wait for '/vqa/inference_server'") - # self.vqa_ac = actionlib.SimpleActionClient('/vqa/inference_server' , VQATaskAction) - # self.vqa_ac.wait_for_server() - - # # https://github.com/k-okada/openai_ros - # # this requres apt install python3.7 python3.7-venv - rospy.loginfo("wait for '/openai/get_response'") - rospy.wait_for_service('/openai/get_response') - self.completion = rospy.ServiceProxy('/openai/get_response', Completion) - - # ## integration of dialogflow <-> google_chat_ros was performed by google_chat_ros/script/helper.py - # rospy.loginfo("wait for '/dialogflow_client/text_action'") - # self.dialogflow_ac = actionlib.SimpleActionClient('/dialogflow_client/text_action' , DialogTextAction) - # self.dialogflow_ac.wait_for_server() - - # rospy.loginfo("wait for '/analyze_text/text'") - # self.analyze_text_ac = actionlib.SimpleActionClient('/analyze_text/text' , AnalyzeTextAction) - # self.analyze_text_ac.wait_for_server() - - rospy.loginfo("subscribe '/google_chat_ros/message_activity'") - self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) + self.query_types = ['aibo_driver/StringStatus', + 'aibo_driver/ObjectStatusArray', + 'jsk_recognition_msgs/VQATaskActionResult'] + self.make_robot_activities_raw = self.make_aibo_activities_raw + super(MessageListener, self).__init__(*args, **kwargs) rospy.loginfo("all done, ready") - def query_multiple_types(self, types, meta_tuple): - "Query mongo messages, returns list of MongoQueryMsgResponse" - msgs = MongoQueryMsgResponse() - for _type in types: - msg = self.query(database = 'jsk_robot_lifelog', - collection = self.robot_name, - type = _type, - single = False, - 
# limit = limit, - meta_query = StringPairList(meta_tuple), - sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) - msgs.messages.extend(msg.messages) - msgs.metas.extend(msg.metas) - return msgs - - def query_mongo_data(self, types, start_time, end_time): - "Query activities for aibo robot, returns list of tuple (msg, meta)" - rospy.logwarn("Query activities from {} until {}".format(start_time, end_time)) - meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}} - meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) - mongo_msgs = self.query_multiple_types(types, meta_tuple) - - activities = [] - for msg, meta in zip(mongo_msgs.messages, mongo_msgs.metas): - msg = deserialise_message(msg) - meta = json.loads(meta.pairs[0].second) - activities.append((msg, meta)) - rospy.logwarn(" Found {} messages".format(len(activities))) - return activities - - def query_mongo_data_days(self, types=None, days=7): - "Query activities for a week, returns list of list of tuple (msg, meta), if activity is empty of that day, returns empty list" - if types == None: - types = self.query_types - # if we found cache file - if (os.path.exists(self.pickle_file) and - (datetime.datetime.today() - datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file))).seconds < 1 * 60 * 60): # seconds -> hours - rospy.loginfo('Loading cached activities data {}'.format(datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file)))) - with open(self.pickle_file, 'rb') as f: - return pickle.load(f) - - activities = [] - today = datetime.date.today() ## for debug ... 
-> - datetime.timedelta(hours=24) - startdate = datetime.datetime(today.year, today.month, today.day, tzinfo=JST) - for days_before in range(days): - activities_raw = self.query_mongo_data(types, - startdate-datetime.timedelta(hours=days_before*24), - startdate-datetime.timedelta(hours=(days_before-1)*24)) - activities.append(activities_raw) - - # dump msgs - with open(self.pickle_file, 'wb') as f: - pickle.dump(activities, f) - f.flush() - - return activities def make_aibo_activities_raw(self, mongo_data_days = None): "Create aibo activities for several days, returns list of list of tuple(temestamp, event)" @@ -247,200 +89,6 @@ def make_aibo_activities_raw(self, mongo_data_days = None): ## return diary_activities_raw ## (timestamp, event) - def make_state_frequency(self, diary_activities_raw): - message = list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' not in x, self.query_types)) - diary_activities_freq = [] - for activities_raw in diary_activities_raw: - activities_raw_state = [x['state'] for x in [x for x in activities_raw if x['type'] in message]] - activities_freq = {key: activities_raw_state.count(key) for key in set(activities_raw_state)} - rospy.logwarn("Found {} activity data (make_state_frequency)".format(len(activities_raw))) - if len(activities_raw) > 0: - rospy.logwarn(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp'])) - rospy.logwarn(" freq : {} ({})".format(activities_freq, len(activities_freq))) - diary_activities_freq.append(activities_freq) - return diary_activities_freq - - def make_activities_events(self, diary_activities_raw, message = None): - if not message: - message = list(filter(lambda x: 'jsk_recognition_msgs/VQATaskActionResult' not in x, self.query_types)) - diary_activities_events = [] - for activities_raw in diary_activities_raw: - activities_events = {} - for activities in activities_raw: - timestamp = activities['timestamp'] - event = activities['state'] - if 
activities['type'] not in message: - continue - if event in activities_events: - time_since_last_seen = activities_events[event]['last_seen'] - timestamp - if time_since_last_seen.seconds/60 < 30: # min - activities_events[event]['tmp_duration'] += time_since_last_seen - else: - # 'duration' keeps maximum duration - # if activities_events[event]['tmp_duration'] > activities_events[event]['duration']: - # activities_events[event]['duration'] = activities_events[event]['tmp_duration'] - # 'duration' keeps accumulated duration - activities_events[event]['duration'] += activities_events[event]['tmp_duration'] - activities_events[event]['tmp_duration'] = datetime.timedelta() - activities_events[event]['last_seen'] = timestamp - activities_events[event]['count'] += 1 - else: - activities_events.update({event : {'last_seen' : timestamp, 'tmp_duration' : datetime.timedelta(), 'duration' : datetime.timedelta(seconds=300), 'count': 1}}) # initially assume 5 min interaction, set initial count is 1 - # print("{} {:24} {} {}".format(timestamp, event, activities_events[event]['duration'], activities_events[event]['tmp_duration'])) - for event in activities_events: - activities_events[event]['duration'] += activities_events[event]['tmp_duration'] - diary_activities_events.append(activities_events) - return diary_activities_events - - def make_image_activities(self, diary_activities_raw = None): - if not diary_activities_raw: - mongo_data_days = self.query_mongo_data_days() - diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days) - - # create activities event data - # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} - # diary_activities_events = [activities_events for day1, activities_events for day2, ....] 
- diary_activities_events = self.make_activities_events(diary_activities_raw, 'jsk_recognition_msgs/VQATaskActionResult') - - image_activities = {} - for activities_raw in diary_activities_raw: - for activities in activities_raw: - if activities['type'] != 'jsk_recognition_msgs/VQATaskActionResult': - continue - timestamp = activities['timestamp'] - answer = activities['state'] - if len(answer.split()) > 3 and \ - max([jaccard_similarity(x.lower().split(' '), answer.lower().split(' ')) for x in image_activities.keys()]+[0]) < 0.5: - image_activities.update({answer : timestamp}) - if (len(image_activities)) > 0: - break - else: - rospy.logwarn(" no valid image description is found...") - # - if len(image_activities) == 0: - return {} - - prompt = "Please select the most memorable and illuminating event by number from the list below.\n\n" - n = 0 - for answer, timestamp in image_activities.items(): - prompt += "{}: {} ({})\n".format(n, answer, timestamp) - n += 1 - - # Avoid error 'This model's maximum context length is 4097 tokens, however you requested 5464 tokens (4952 in your prompt; 512 for the completion). 
Please reduce your prompt' - no = len(image_activities) - if len(prompt) + 512 < 4097: - response = self.openai_completion(prompt) - n = re.search(r'(\d+)', response) - if n: - no = int(n.group(1)) - else: - rospy.logwarn("too long prompt...") - - if no >= len(image_activities): - rospy.loginfo("no is {}, so use random....".format(no)) - no = random.randrange(len(image_activities)) - - answer, timestamp = list(image_activities.items())[no] - rospy.loginfo("topic of the day") - rospy.loginfo(" answer : {}".format(answer)) - rospy.loginfo(" timestamp : {}".format(timestamp)) - results = self.query_images_and_classify(query = answer, - start_time = timestamp - datetime.timedelta(minutes=5), - end_time = timestamp + datetime.timedelta(minutes=5), - classify = False) - if len(results) > 0: - # pubish as card - filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) - self.write_image_with_annotation(filename, results[0], answer) - return {'text': answer, 'filename': filename} - - def make_activity(self, diary_activities_raw = None): - "Returns activity prompts" - # create diary activities_raw - # list of (timestamp, event) [[{'temestamp': , 'state':, 'type': }, {'temestamp': , 'state':, 'type': } ...],[#for 2nd day],[#for 3rd day]...] - if not diary_activities_raw: - mongo_data_days = self.query_mongo_data_days() - diary_activities_raw = self.make_aibo_activities_raw(mongo_data_days) - - # make frequencey data for 7days - # activities_freq {'event_1' : count, 'event_2' : count} - # diary_activities_freq = [activities_freq for day1, activities_freq for day2, ...] - diary_activities_freq = self.make_state_frequency(diary_activities_raw) - - # create activities event data - # activities_events[event_name] = {'duration', datetime.timedelta, 'count': int} - # diary_activities_events = [activities_events for day1, activities_events for day2, ....] 
- #diary_activities_events = self.make_activities_events(diary_activities_raw, 'aibo_driver/') - diary_activities_events = self.make_activities_events(diary_activities_raw) - diary_recognition_events = self.make_activities_events(diary_activities_raw, - message='jsk_recognition_msgs/VQATaskActionResult') - diary_activities_events = [{k: v for d in L for k, v in d.items()} for L in zip(diary_activities_events, diary_recognition_events)] - - for activities_events in diary_activities_events: - print("--") - for event, duration in sorted(activities_events.items(), key=lambda x: x[1]['duration'], reverse=True): - print("{:24} : {:4.2f} min ({} times)".format(event, duration['duration'].seconds/60, duration['count'])) - - # flatten list - activities_events = [x for events in diary_activities_events for x in events.keys()] # get all activities with duplicates - - if len(activities_events) == 0: - return "" - # percentages of activities happend - prompt = "{}\n\n".format(list(list(filter(None, diary_activities_events))[0].items())[0][1]['last_seen'].strftime("%a %d %b %Y")) - prompt += "\n 'action : time'\n" - - # sort activities event by it's occurence [list] -> sorted({key: count}) - activities_events_freq = sorted({key: activities_events.count(key) for key in set(activities_events)}.items(), key=lambda x:x[1], reverse=True) - always_action = False - for event, count in activities_events_freq[:10]: - if count/float(len(diary_activities_events)) > 0.25: - if next((x for x in diary_activities_events if event in x.keys()), None): - prompt += "{} : {}\n".format(event, next(x for x in diary_activities_events if event in x.keys())[event]['last_seen']) - else: - prompt += "{} : {:.2f}\n".format(event, count/float(len(diary_activities_events))) - always_action = True - - if not always_action: - prompt += "none\n" - - # estimate frequence in 24h - prompt += "\n 'action : increase from the number of time done yesterday'\n" - - more_yesterday_action = False - 
diary_activities_events_no_empty = list(filter(None, diary_activities_events)) - if len(diary_activities_events_no_empty) >= 2: - l0 = diary_activities_events_no_empty[0] - l1 = diary_activities_events_no_empty[1] - for event in set(activities_events): - if event in l0 and event in l1: - increase = l0[event]['count'] - l1[event]['count'] - if increase > 0: - prompt += "{} : +{}\n".format(event, increase) - more_yesterday_action = True - if not more_yesterday_action: - prompt += "none\n" - - # - prompt += "\n 'action : number of days passed since you last did it'\n" - long_time_action = False - # if we have more than 10 activities, remove lowest events - for event in [x[0] for x in sorted(diary_activities_events[0].items(), key=lambda x: x[1]['duration'].seconds, reverse=True)[:10]] \ - if len(diary_activities_events[0].keys()) > 10 else diary_activities_events[0].keys(): - n = 1 - for diary_activities_event in diary_activities_events[1:]: - if event not in diary_activities_event.keys() or diary_activities_event[event]['duration'].seconds < 1: - n += 1 - else: - break - if n >= 2: - prompt += "{} : {}\n".format(event, n) - long_time_action = True - if not long_time_action: - prompt += "none\n" - - rospy.logdebug(prompt) - return prompt def make_diary(self, language="Japanese"): "make dirary" @@ -490,336 +138,6 @@ def make_diary(self, language="Japanese"): return response - def make_response(self, text, language="Japanese"): - if language=="Japanese": - text = self.openai_completion('Translate the following sentences to English "{}"'.format(text)) - prompt = "You are a baby robot. You were taken care of by people around you. The following data is a record of today's actions regarding what we always do, what we did more of yesterday, and What happened after a long time.\nIf your frined ask you as '{}', wow do you reply? 
Note, however, that you are a baby robot, so please make it a child-like response.\n\n".format(text) + self.make_activity() - - response = self.openai_completion(prompt) - rospy.loginfo("prompt = {}".format(prompt)) - rospy.loginfo("response = {}".format(response)) - - prompt = "Please rewrite the following response as childlike as you can. Write a maximum 120 {} charactors.\n\n".format(language) + response - response = self.openai_completion(prompt) - rospy.loginfo("prompt = {}".format(prompt)) - rospy.loginfo("response = {}".format(response)) - return {'text': response} - - def make_reply(self, message, lang="en", startdate=datetime.datetime.now(JST)-datetime.timedelta(hours=24), duration=datetime.timedelta(hours=24) ): - enddate = startdate+duration - rospy.logwarn("Run make_reply({} from {} to {})".format(message, startdate, enddate)) - query = self.text_to_salience(message) - rospy.logwarn("query using salience word '{}'".format(query)) - # look for images - try: - # get chat message - results, chat_msgs = self.query_dialogflow(query, startdate, enddate, threshold=0.25) - # retry = 0 - # while retry < 3 and len(results) == 0 and len(chat_msgs.metas) > 0: - # meta = json.loads(chat_msgs.metas[-1].pairs[0].second) - # results, chat_msgs = self.query_dialogflow(query, datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)) - # retry = retry + 1 - # sort based on similarity with 'query' - chat_msgs_sorted = sorted(results, key=lambda x: x['similarity'], reverse=True) - - if len(chat_msgs_sorted) == 0: - rospy.logwarn("no chat message was found") - else: - # query images that was taken when chat_msgs are stored - msg = chat_msgs_sorted[0]['msg'] - meta = chat_msgs_sorted[0]['meta'] - text = chat_msgs_sorted[0]['message'] - startdate = chat_msgs_sorted[0]['timestamp'] - action = chat_msgs_sorted[0]['action'] - similarity = chat_msgs_sorted[0]['similarity'] - # query chat to get response - #meta = json.loads(chat_msgs_sorted[0]['meta'].pairs[0].second) - 
# text = msg.message.argument_text or msg.message.text - # startdate = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - rospy.loginfo("Found message '{}'({}) at {}, corresponds to query '{}' with {:2f}%".format(text, action, startdate.strftime('%Y-%m-%d %H:%M:%S'), query, similarity)) - - # query images when chat was received - start_time = startdate # startdate is updated with found chat space - end_time = enddate # enddate is not modified within this function, it is given from chat - results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time) - - # no images found - if len(results) == 0: - return {'text': '記憶がありません🤯'} - - end_time = results[-1]['timestamp'] - - # sort - results = sorted(results, key=lambda x: x['similarities'], reverse=True) - rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), x['similarities']), results)))) - best_result = results[0] - - ''' - # if probability is too low, try again - while len(results) > 0 and results[0]['similarities'] < 0.25: - - start_time = end_time-datetime.timedelta(hours=24) - timestamp = datetime.datetime.now(JST) - results = self.query_images_and_classify(query=query, start_time=start_time, end_time=end_time, limit=300) - if len(results) > 0: - end_time = results[-1]['timestamp'] - # sort - results = sorted(results, key=lambda x: x['similarities'], reverse=True) - #rospy.loginfo("Probabilities of all images {}".format(list(map(lambda x: (x['label'], x['similarities']), results)))) - if len(results) > 0 and results[0]['similarities'] > best_result['similarities']: - best_result = results[0] - - rospy.loginfo("Found '{}' image with {:0.2f} % simiarity at {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'))) - ''' - - ## make prompt - reaction = self.describe_image_scene(best_result['image']) - if len(chat_msgs_sorted) > 0 and 
chat_msgs_sorted[0]['action'] and 'action' in chat_msgs_sorted[0]: - reaction += " and you felt " + chat_msgs_sorted[0]['action'] - rospy.loginfo("reaction = {}".format(reaction)) - - # make prompt - prompt = 'if you are a pet and someone tells you \"' + message + '\" when we went together, ' + \ - 'and ' + reaction + ' in your memory of that moment, what would you reply? '+ \ - 'Show only the reply in {lang}'.format(lang={'en': 'English', 'ja':'Japanese'}[lang]) - loop = 0 - result = None - while loop < 3 and result is None: - try: - result = self.completion(prompt=prompt,temperature=0) - except rospy.ServiceException as e: - rospy.logerr("Service call failed: %s"%e) - result = None - loop += 1 - result.text = result.text.lstrip().encode('utf8') - rospy.loginfo("prompt = {}".format(prompt)) - rospy.loginfo("result = {}".format(result)) - # pubish as card - filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) - self.write_image_with_annotation(filename, best_result, prompt) - return {'text': result.text, 'filename': filename} - - except Exception as e: - raise ValueError("Query failed {} {}".format(e, traceback.format_exc())) - - - def openai_completion(self, prompt, temperature=0): - loop = 0 - result = None - while loop < 5 and result is None: - try: - result = self.completion(prompt=prompt,temperature=temperature) - if result.text == '': - rospy.logwarn(result) - rospy.logwarn("result text is too short, retry completion") - rospy.sleep(2) - result = None - except rospy.ServiceException as e: - rospy.logerr("Service call failed: %s"%e) - rospy.sleep(2) - result = None - loop += 1 - if result is None: - raise Exception('[ERROR] openni_completion failed to complete {}'.format(prompt)) - result.text = result.text.lstrip() - rospy.logdebug("prompt = {}".format(prompt)) - rospy.logdebug("result = {}".format(result)) - return result.text - - def write_image_with_annotation(self, filename, best_result, prompt): - image = 
bridge.compressed_imgmsg_to_cv2(best_result['image']) - _, width, _ = image.shape - scale = width/640.0 - if 'label' in best_result and 'similarities' in best_result: - cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), - (10,int(20*scale)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (255,255,255), 8, 1) - cv2.putText(image, "{} ({:.2f}) {}".format(best_result['label'], best_result['similarities'], best_result['timestamp'].strftime('%Y-%m-%d %H:%M:%S')), - (10,int(20*scale)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (0,0,0), 2, 1) - string_width = 70 - for i in range(0, len(prompt), string_width): # https://stackoverflow.com/questions/13673060/split-string-into-strings-by-length - text = prompt[i:i+string_width] - cv2.putText(image, text, (10,int(43*scale)+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (255,255,255), 4, 1) - cv2.putText(image, text, (10,int(43*scale)+int(i/string_width*20)), cv2.FONT_HERSHEY_SIMPLEX, 0.5*scale, (0,0,0), 1, 1) - cv2.imwrite(filename, image) - rospy.logwarn("save images to {}".format(filename)) - - - def query_dialogflow(self, query, start_time, end_time, limit=30, threshold=0.0): - rospy.logwarn("Query dialogflow from {} until {}".format(start_time, end_time)) - meta_query= {'inserted_at': {"$lt": end_time, "$gt": start_time}} - meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) - chat_msgs = self.query(database = 'jsk_robot_lifelog', - collection = self.robot_name, - # type = 'google_chat_ros/MessageEvent', - type = 'dialogflow_task_executive/DialogTextActionResult', - single = False, - # limit = limit, - meta_query = StringPairList(meta_tuple), - sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) - - # optimization... 
send translate once - messages = '' - for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): - msg = deserialise_message(msg) - message = msg.result.response.query.replace('\n','') - messages += message + '\n' - messages = self.translate(messages, dest="en").text.split('\n') - - # show chats - results = [] - for msg, meta in zip(chat_msgs.messages, chat_msgs.metas): - msg = deserialise_message(msg) - meta = json.loads(meta.pairs[0].second) - timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - # message = msg.message.argument_text or msg.message.text - message = msg.result.response.query - #message_translate = self.translate(message, dest="en").text - message_translate = messages.pop(0).strip() - result = {'message': message, - 'message_translate': message_translate, - 'timestamp': timestamp, - 'similarity': difflib.SequenceMatcher(None, query, message_translate).ratio(), - 'action': msg.result.response.action, - 'msg': msg, - 'meta': meta} - if msg.result.response.action in ['make_reply', 'input.unknown']: - rospy.logwarn("Found dialogflow messages {}({}) at {} but skipping (action:{})".format(result['message'], result['message_translate'], result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), msg.result.response.action)) - else: - rospy.loginfo("Found dialogflow messages {}({}) ({}) at {} ({}:{:.2f})".format(result['message'], result['message_translate'], msg.result.response.action, result['timestamp'].strftime('%Y-%m-%d %H:%M:%S'), query, result['similarity'])) - if ( result['similarity'] > threshold): - results.append(result) - else: - rospy.logwarn(" ... 
skipping (threshold: {:.2f})".format(threshold)) - - - return results, chat_msgs - - - def query_images_and_classify(self, query, start_time, end_time, limit=10, classify=True): - rospy.logwarn("Query images from {} to {}".format(start_time, end_time)) - meta_query= {#'input_topic': '/spot/camera/hand_color/image/compressed/throttled', - 'inserted_at': {"$gt": start_time, "$lt": end_time}} - meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) - msgs = self.query(database = 'jsk_robot_lifelog', - collection = self.robot_name, - type = 'sensor_msgs/CompressedImage', - single = False, - limit = limit, - meta_query = StringPairList(meta_tuple), - sort_query = StringPairList([StringPair('_meta.inserted_at', '-1')])) - - rospy.loginfo("Found {} images".format(len(msgs.messages))) - if len(msgs.messages) == 0: - rospy.logwarn("no images was found") - - # get contents of images - results = [] - for msg, meta in zip(msgs.messages, msgs.metas): - meta = json.loads(meta.pairs[0].second) - timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST) - # rospy.logwarn("Found images at {}".format(timestamp)) - - result = {'query' : query, 'image' : deserialise_message(msg), 'timestamp': timestamp} - if classify: - goal = ClassificationTaskGoal() - goal.compressed_image = result['image'] - goal.queries = [query] - self.classification_ac.send_goal(goal) - self.classification_ac.wait_for_result() - result = self.classification_ac.get_result() - idx = result.result.label_names.index(query) - #similarities = result.result.probabilities - similarities = result.result.label_proba - # rospy.logwarn(" ... {}".format(list(zip(result.result.label_names, map(lambda x: "{:.2f}".format(x), similarities))))) - rospy.logwarn("Found images at {} .. 
{}".format(timestamp, list(zip(result.result.label_names, map(lambda x: "{:.4f}".format(x), similarities))))) - result.update({'label': result.result.label_names[idx], 'probabilities': result.result.probabilities[idx], 'similarities': result.result.label_proba[idx]}) - results.append(result) - - # we do not sorty by probabilites, becasue we also need oldest timestamp - return results - - def describe_image_scene(self, image): - goal = VQATaskGoal() - goal.compressed_image = image - - # unusual objects - if random.randint(0,1) == 1: - goal.questions = ['what unusual things can be seen?'] - reaction = 'you saw ' - else: - goal.questions = ['what is the atmosphere of this place?'] - reaction = 'the atmosphere of the scene was ' - - # get vqa result - self.vqa_ac.send_goal(goal) - self.vqa_ac.wait_for_result() - result = self.vqa_ac.get_result() - reaction += result.result.result[0].answer - return reaction - - def publish_google_chat_card(self, text, space, filename=None): - goal = SendMessageGoal() - goal.text = text - if filename: - goal.cards = [Card(sections=[Section(widgets=[WidgetMarkup(image=Image(localpath=filename))])])] - goal.space = space - rospy.logwarn("send {} to {}".format(goal.text, goal.space)) - self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10)) - - def text_to_salience(self, text): - goal = AnalyzeTextGoal() - goal.text = text; - self.analyze_text_ac.send_goal(goal) - self.analyze_text_ac.wait_for_result() - entity = self.analyze_text_ac.get_result() - if len(entity.entities) > 0: - return entity.entities[0].name - else: - return text - - def translate(self, text, dest): - global translator - loop = 3 - while loop > 0: - try: - ret = translator.translate(text, dest="en") - return ret - except Exception as e: - rospy.logwarn("Faile to translate {}".format(e)) - time.sleep(1) - translator = Translator() - loop = loop - 1 - return Translated(text=text, dest=dest) - - - def cb(self, msg): - space = 'spaces/AAAAoTwLBL0' ## 
default space JskRobotBot - if msg._type == 'google_chat_ros/MessageEvent': - text = msg.message.argument_text.lstrip() or msg.message.text.lstrip() - space = msg.space.name - rospy.logwarn("Received chat message '{}'".format(text)) - else: - rospy.logerr("Unknown message type {}".format(msg._type)) - return - - try: - language = 'English' if is_ascii(text) else 'Japanese' - if any(x in text for x in ['diary', '日記']): - self.publish_google_chat_card("Sure!", space) - ret = self.make_diary(language) - if 'filename' in ret: - # upload text first, then upload images - self.publish_google_chat_card(ret['text'], space) - self.publish_google_chat_card('', space, ret['filename']) - else: - self.publish_google_chat_card(ret['text'], space) - else: - ret = self.make_response(text, language) - self.publish_google_chat_card(ret['text'], space) - - except Exception as e: - rospy.logerr("Callback failed {} {}".format(e, traceback.format_exc())) - self.publish_google_chat_card("💀 {}".format(e), space) if __name__ == '__main__': parser = argparse.ArgumentParser() diff --git a/database_talker/scripts/make_diary.py b/database_talker/scripts/make_diary.py new file mode 100755 index 0000000000..b4c6a6bb93 --- /dev/null +++ b/database_talker/scripts/make_diary.py @@ -0,0 +1,44 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import rospy +import argparse +import logging +import sys + +import datetime +from dateutil import tz +JST = tz.gettz('Asia/Tokyo') + +from database_talker import DatabaseTalkerBase + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--test', action='store_true') + parser.add_argument('--prompt-type', default='basic', choices=['basic','personality']) + today_string=datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') + parser.add_argument('--date', default=today_string, help="use {} or {}".format(today_string, datetime.datetime.today().strftime('%Y-%m-%d'))) + + args = parser.parse_args(rospy.myargv()[1:]) + + 
rospy.init_node('database_talker', anonymous=True) + + logger = logging.getLogger('rosout') + logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG]) + + try: + start_date = datetime.datetime.strptime(args.date, '%Y-%m-%d') + except: + try: + start_date = datetime.datetime.strptime(args.date, '%Y-%m-%d %H:%M:%S') + except: + rospy.logerr("Invalid date format") + sys.exit(1) + + ml = DatabaseTalkerBase(start_date=start_date, wait_for_chat_server=not args.test, use_activities_cache=not args.test, prompt_type=args.prompt_type) + if args.test: + ret = ml.make_diary() + if 'filename' in ret: + rospy.loginfo("image is saved at {}".format(ret['filename'])) + sys.exit(0) + rospy.spin() diff --git a/database_talker/scripts/make_lovot_diary.py b/database_talker/scripts/make_lovot_diary.py new file mode 100755 index 0000000000..2be054d887 --- /dev/null +++ b/database_talker/scripts/make_lovot_diary.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- + +import rospy +import argparse +import logging +import sys + +import datetime +from dateutil import tz +JST = tz.gettz('Asia/Tokyo') + +from database_talker import DatabaseTalkerBase + +class LovotDatabaseTalker(DatabaseTalkerBase): + + def __init__(self, *args, **kwargs): + + self.make_robot_activities_raw = self.make_lovot_activities_raw + super(LovotDatabaseTalker, self).__init__(*args, **kwargs) + + # override query_type after super__.init() + self.query_types = ['lovot_driver/StringStamped', + 'jsk_recognition_msgs/VQATaskActionResult'] + + rospy.loginfo("all done, ready") + + + def make_lovot_activities_raw(self, mongo_data_days = None): + "Create lovot activities for several days, returns list of list of tuple(temestamp, event)" + # list of list of tuples (msg, meta) [[(msg, meta), (msg, meta),...],[#for 2nd day], [#for 3rd day]] + if not mongo_data_days: + mongo_data_days = self.query_mongo_data_days() + diary_activities_raw = [] ## (timestamp, event) + for mongo_data in mongo_data_days: 
+            rospy.loginfo("Found {} mongo data (make_lovot_activities_raw)".format(len(mongo_data)))
+            rospy.loginfo(" types : {}".format(list(set([x[1]['stored_type'] for x in mongo_data]))))
+            activities_raw = []
+            input_topics = []
+            for msg, meta in mongo_data:
+                state = []
+                timestamp = datetime.datetime.fromtimestamp(meta['timestamp']//1000000000, JST)
+                input_topics.append(meta['input_topic'])
+                rospy.logwarn("{} {}".format(timestamp, msg.data))
+                if meta['stored_type'] == 'lovot_driver/StringStamped':
+                    if msg.data in ['HUGGED_LONG_TIME', 'CARRIED_TO_NEST', 'HUGGED']:
+                        state = ['BE {}'.format(msg.data.replace('_',' '))]
+                    elif msg.data in ['STROKE_MANY_TIMES']:
+                        state = ['BE STROKED MANY TIMES']
+                    elif msg.data in ['HELP', 'STROKE']:
+                        state = ['BE {}ED'.format(msg.data)]
+                    elif msg.data in ['CALL_NAME']:
+                        state = ['BE CALLED MY NAME']
+                    elif msg.data in ['OUCH']:
+                        state = ['BE BEATEN AND SAY OUCH']
+                    elif msg.data in ['TOUCH_NOSE']:
+                        state = ['BE TOUCHED MY NOSE']
+                    elif msg.data in ['MIMIC_GAME', 'PUSH_AND_PULL']:
+                        state = ['PLAY {}'.format(msg.data.replace('_',' '))]
+                    elif msg.data in ['BEAUTIFUL_RETURN']:
+                        state = ['RETURN TO THE NEST SMOOTHLY']
+                    else:
+                        state = [msg.data]
+                else:
+                    rospy.logwarn("Unknown stored type: {}".format(meta['stored_type']))
+                    continue
+                # create activities_raw
+                for s in state:
+                    activities_raw.append({'timestamp': timestamp, 'state': s, 'type': meta['stored_type']})
+
+            diary_activities_raw.append(activities_raw)
+
+            if len(activities_raw) > 0:
+                rospy.loginfo(" period : {} {}".format(activities_raw[-1]['timestamp'], activities_raw[0]['timestamp']))
+                rospy.loginfo(" topics : {}".format({key: input_topics.count(key) for key in set(input_topics)}))
+        ##
+        return diary_activities_raw ## (timestamp, event)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--test', action='store_true')
+    parser.add_argument('--prompt-type', default='basic', choices=['basic','personality'])
+
today_string=datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S')
+    parser.add_argument('--date', default=today_string, help="use {} or {}".format(today_string, datetime.datetime.today().strftime('%Y-%m-%d')))
+
+    args = parser.parse_args(rospy.myargv()[1:])
+
+    rospy.init_node('database_talker', anonymous=True)
+
+    logger = logging.getLogger('rosout')
+    logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG])
+
+    try:
+        start_date = datetime.datetime.strptime(args.date, '%Y-%m-%d')
+    except ValueError:
+        try:
+            start_date = datetime.datetime.strptime(args.date, '%Y-%m-%d %H:%M:%S')
+        except ValueError:
+            rospy.logerr("Invalid date format")
+            sys.exit(1)
+
+    ml = LovotDatabaseTalker(start_date=start_date, wait_for_chat_server=not args.test, use_activities_cache=not args.test, prompt_type=args.prompt_type)
+    if args.test:
+        ret = ml.make_diary()
+        if 'filename' in ret:
+            rospy.loginfo("image is saved at {}".format(ret['filename']))
+        sys.exit(0)
+    rospy.spin()

From 2393c4757c06b690a254fb0aa681f850609ab93a Mon Sep 17 00:00:00 2001
From: Kei Okada
Date: Fri, 29 Mar 2024 15:55:38 +0900
Subject: [PATCH 35/60] update README.md

---
 database_talker/README.md | 56 +++++++++++++++++++++++----------------
 1 file changed, 33 insertions(+), 23 deletions(-)

diff --git a/database_talker/README.md b/database_talker/README.md
index 1f7b39e3c1..f68d2241da 100644
--- a/database_talker/README.md
+++ b/database_talker/README.md
@@ -1,34 +1,44 @@
-# hoge.py
+# database talker

-What is this?
-## Requirements
+## What is this?

-See `requirements.txt` for python requirements.
+This is sample code to generate a response/diary from a robot's experience stored in MongoDB.

-For ROS dependency.
+## How to setup -- `google_chat_ros` in `jsk_3rdparty` with [this improvement](https://github.com/jsk-ros-pkg/jsk_3rdparty/pull/451) -- `dialogflow_client` in `dialogflow_task_executive` package in `jsk_3rdparty` with [this improvement](https://github.com/jsk-ros-pkg/jsk_3rdparty/pull/451) -- `mongodb_store` with https://github.com/strands-project/mongodb_store/pull/282 -- CLIP VQA ros node introduced with https://github.com/jsk-ros-pkg/jsk_recognition/pull/2730. -- `ros_google_cloud_language` package in `jsk_3rdparty` +Set up a workspace using the `rosinstall` file and compile it with `catkin build database_talker`. ## How to use -1. Setup google chat ros with Cloud Pub/Sub - 1. prepare `credential_json` and `project_id` and `subscription_id` -2. Setup dialogflow - 1. prepare `credential_json` and `project_id` -3. Setup mongodb_store - 1. Create database with mondodb -4. Setup CLIP VQA node - 1. Make docker model - 2. Run ROS Interface node -5. Setup google cloud natural language - 1. Prepare `credential_json` +For a minimum setup, run the following command. This will start the mongodb/lifelog nodes and save the usb camera data to the database. +```bash +roslaunch database_talker demo.launch +``` -And run +To generate a diary using robot memory, execute the following command and talk to GoogleChat bot. ```bash -roslaunch database_talker demo.launch +rosrun database_talker make_diary.py --prompt-type personality +``` + +## Tips + +### How to test using data from a specific date without using GoogleChat. +```bash +rosrun database_talker make_diary.py --test --prompt-type personality --date 2023-03-20 +``` + +### Stop using external DBs, this is recommended during debug phase. 
+ +Remove `mongodb_store_extras` in `jsk_robot_startup/lifelog/mongodb_replication_params.yaml` +``` +-mongodb_store_extras: [["robot-database.jsk.imi.i.u-tokyo.ac.jp", 27017],["musca.jsk.imi.i.u-tokyo.ac.jp",27017]] ++mongodb_store_extras: [] +``` + +### Force store image + +An image will only be saved if a significant change is found in the image within seconds. To force the image to be saved, use the following command. ``` +rostopic pub -1 /publish_trigger_mongodb_event roseus/StringStamped '{header: auto, data: debug}' +``` \ No newline at end of file From 714242ea3d56ef15a9946aefb6cbeea954ecaf8a Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 29 Mar 2024 15:55:59 +0900 Subject: [PATCH 36/60] add more depends to package.xml --- database_talker/package.xml | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/database_talker/package.xml b/database_talker/package.xml index 8f56000d06..00094dc2bf 100644 --- a/database_talker/package.xml +++ b/database_talker/package.xml @@ -1,5 +1,5 @@ - + database_talker 0.0.0 The database_talker package @@ -7,11 +7,19 @@ sktometometo - TODO + BSD catkin catkin_virtualenv + mongodb_store + usb_cam + jsk_robot_startup + openai_ros + google_chat_ros + gdrive_ros + dialogflow_task_executive + ros_google_cloud_language requirements.txt From 42e114bba67b8ab089862b231b595b3ace24dec1 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 29 Mar 2024 15:58:08 +0900 Subject: [PATCH 37/60] src/database_talker/__init__.py: fix for headless mode --- database_talker/src/database_talker/__init__.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py index 3806cdd6b9..1a607acc83 100644 --- a/database_talker/src/database_talker/__init__.py +++ b/database_talker/src/database_talker/__init__.py @@ -27,6 +27,10 @@ # import time import traceback +IsHeadless = False +if 'DISPLAY' not in os.environ: + 
IsHeadless = True + from dateutil import tz JST = tz.gettz('Asia/Tokyo') @@ -277,7 +281,7 @@ def make_image_activities(self, diary_activities_raw = None): start_time = timestamp - datetime.timedelta(minutes=5), end_time = timestamp + datetime.timedelta(minutes=5), classify = False) - if True: + if not IsHeadless: cv2.imshow('images of today', cv2.hconcat([cv2.imdecode(np.fromstring(result['image'].data, np.uint8), cv2.IMREAD_COLOR) for result in results])) cv2.waitKey(100) @@ -546,7 +550,7 @@ def make_response(self, text, language="Japanese"): end_time = end_time, classify = False) if len(results) > 0: - if True: # debug + if not IsHeadless: # debug try: cv2.imshow('images for response', cv2.hconcat([cv2.imdecode(np.fromstring(result['image'].data, np.uint8), cv2.IMREAD_COLOR) for result in results])) cv2.waitKey(100) From e31b072aeea9197b7773d1d559f91f0b471cd972 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 29 Mar 2024 15:59:01 +0900 Subject: [PATCH 38/60] put store_image_description into sample/include/ --- database_talker/CMakeLists.txt | 2 +- .../sample/include/store_image_description.py | 126 ++++++++++++++++++ 2 files changed, 127 insertions(+), 1 deletion(-) create mode 100755 database_talker/sample/include/store_image_description.py diff --git a/database_talker/CMakeLists.txt b/database_talker/CMakeLists.txt index 624c928028..13c0f3340b 100644 --- a/database_talker/CMakeLists.txt +++ b/database_talker/CMakeLists.txt @@ -15,6 +15,6 @@ catkin_package( ) catkin_install_python(PROGRAMS - scripts/hoge.py + sample/include/store_image_description.py DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} ) diff --git a/database_talker/sample/include/store_image_description.py b/database_talker/sample/include/store_image_description.py new file mode 100755 index 0000000000..8f31bc072f --- /dev/null +++ b/database_talker/sample/include/store_image_description.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python + +import rospy +import actionlib +from sensor_msgs.msg import 
CompressedImage +import os +from importlib import import_module + +import cv2 +IsHeadless = False +if 'DISPLAY' not in os.environ: + IsHeadless = True +import numpy as np +from cv_bridge import CvBridge + +import json +import base64 +from openai_ros.srv import ChatCompletions, ChatCompletionsRequest + +from std_msgs.msg import Header +from jsk_recognition_msgs.msg import VQATaskActionResult, VQATaskResult, VQAResult, QuestionAndAnswerText + +from roseus.msg import StringStamped + +bridge = CvBridge() +result_pub = None +image_pub = None +chat_completion = None +images = [np.array(cv2.imencode('.jpg', np.zeros((120,160,3), np.uint8))[1]).tostring()] + +def vqa(question, images, temperature = 0.0, max_tokens = 300, debug = False): + global chat_completion + + # debug + if (not IsHeadless) and debug and len(images)>0: + cv2.imshow('debug', cv2.hconcat([cv2.imdecode(np.fromstring(image, np.uint8), cv2.IMREAD_COLOR) for image in images])) + cv2.waitKey(100) + + image_urls = [{'type': 'image_url', 'image_url' : {'url': 'data:image/jpeg;base64,'+base64.b64encode(image).decode('utf-8')}} for image in images] + req = ChatCompletionsRequest(model = 'gpt-4-vision-preview', + messages = json.dumps([{"role": "user", + "content": [ {"type": "text", "text": question} ] + + image_urls }]), + temperature = temperature, max_tokens=max_tokens) + rospy.loginfo("{}".format(req.messages[0:255])) + + ret = chat_completion(req) + answer = ret.content + + rospy.loginfo('Q: {}'.format(question)) + rospy.loginfo('- {}'.format(answer)) + + return answer + +debug_msg = StringStamped() +def debug_cb(data): + connection_header = data._connection_header['type'].split('/') + ros_pkg = connection_header[0] + '.msg' + msg_type = connection_header[1] + msg_class = getattr(import_module(ros_pkg), msg_type) + rospy.loginfo("received {}/{}".format(ros_pkg, msg_type)) + global debug_msg + if msg_class is StringStamped: + msg = msg_class().deserialize(data._buff) + debug_msg = msg + else: + debug_msg = 
StringStamped(header = rospy.Header(stamp=rospy.Time.now()), data="debug") + # + return + global mongodb_event_sub + mongodb_event_sub.unregister() + mongodb_event_sub = rospy.Subscriber('/publish_trigger_mongodb_event', rospy.AnyMsg, debug_cb, queue_size=1) + +def cb(msg): + rospy.logerr("debug cb") + global chat_completion + global images + global result_pub, image_pub + global debug_msg + + small_msg_data = np.array(cv2.imencode('.jpg', cv2.resize(cv2.imdecode(np.fromstring(msg.data, np.uint8), cv2.IMREAD_COLOR),(160,120)))[1]).tostring() + if len(images) == 0: + images.extend([small_msg_data]) + + questions = rospy.get_param('~questions', ['Provide a brief caption under 140 characters for this image, focusing on the most striking aspect and overall atmosphere.']) + question = ' '.join(questions) if type(questions) == list else questions + + if (not IsHeadless): + cv2.imshow('debug', cv2.hconcat([cv2.imdecode(np.fromstring(image, np.uint8), cv2.IMREAD_COLOR) for image in images])) + cv2.waitKey(100) + + # use VQA to filter new image (DO NOT USE THIS, THIS COSTS TOO HIGH) + ''' + use_this_image_answer = vqa(# "Does the last image is totally different from and more impressive than the rest of images?, Please answer YES or NO.", + # "Focusing on the subject matter of the images, is the last image portraying a completely different theme or subject than the earlier images? Please respond with YES or NO and identify the theme or subject of all images.", + "Focusing on the subject matter of the images, is the first image portraying a completely different theme or subject than the {}? 
Please respond with YES or NO and identify the theme or subject of all images.".format('earlier images' if len(images)>1 else 'other image'), + [small_msg_data] + images, temperature = 1.0, debug=True) + use_this_image = 'YES' in use_this_image_answer[:10] + ''' + if abs((rospy.Time.now() - debug_msg.header.stamp).to_sec()) < 5 and debug_msg.data == 'debug': + images.extend([small_msg_data]) + if len(images) > 10: + images = images[1:] + + answer = vqa(question, [msg.data], temperature = 1.0) + result_pub.publish(VQATaskActionResult(header=Header(stamp=rospy.Time.now()), + result=VQATaskResult(result=VQAResult(result=[QuestionAndAnswerText(question=question, answer=answer)]), done=True))) + image_pub.publish(msg) + return + +if __name__ == '__main__': + try: + rospy.init_node('store_image_description', anonymous=True) + debug_msg = StringStamped(header=Header(stamp=rospy.Time.now())) + rospy.loginfo("wait for '/openai/chat_completions'") + rospy.wait_for_service('/openai/chat_completions') + chat_completion = rospy.ServiceProxy('/openai/chat_completions', ChatCompletions) + + result_pub = rospy.Publisher("~result", VQATaskActionResult, queue_size=1) + image_pub = rospy.Publisher("~result/image/compressed", CompressedImage, queue_size=1) + mongodb_event_sub = rospy.Subscriber('/publish_trigger_mongodb_event', rospy.AnyMsg, debug_cb, queue_size=1) + rospy.Subscriber('image', CompressedImage, cb, queue_size=1) + rospy.loginfo("start subscribing {}".format(rospy.resolve_name('image'))) + rospy.spin() + except rospy.ROSInterruptException: + pass From 2b085e4f6b94b5a43af7163534c79d92d6039d41 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 29 Mar 2024 16:00:32 +0900 Subject: [PATCH 39/60] add sample/sample.launch --- database_talker/README.md | 2 +- database_talker/launch/demo.launch | 62 -------------- database_talker/sample/include/resize.launch | 31 +++++++ database_talker/sample/sample.launch | 89 ++++++++++++++++++++ 4 files changed, 121 insertions(+), 63 
deletions(-) delete mode 100644 database_talker/launch/demo.launch create mode 100644 database_talker/sample/include/resize.launch create mode 100644 database_talker/sample/sample.launch diff --git a/database_talker/README.md b/database_talker/README.md index f68d2241da..13155279ec 100644 --- a/database_talker/README.md +++ b/database_talker/README.md @@ -12,7 +12,7 @@ Set up a workspace using the `rosinstall` file and compile it with `catkin build For a minimum setup, run the following command. This will start the mongodb/lifelog nodes and save the usb camera data to the database. ```bash -roslaunch database_talker demo.launch +roslaunch database_talker sample.launch ``` To generate a diary using robot memory, execute the following command and talk to GoogleChat bot. diff --git a/database_talker/launch/demo.launch b/database_talker/launch/demo.launch deleted file mode 100644 index c1fddc9059..0000000000 --- a/database_talker/launch/demo.launch +++ /dev/null @@ -1,62 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - pixel_format: yuyv - - - - - - - diff --git a/database_talker/sample/include/resize.launch b/database_talker/sample/include/resize.launch new file mode 100644 index 0000000000..5df08ed8fc --- /dev/null +++ b/database_talker/sample/include/resize.launch @@ -0,0 +1,31 @@ + + + + + + + + + + + + + + + + + + + + + + + diff --git a/database_talker/sample/sample.launch b/database_talker/sample/sample.launch new file mode 100644 index 0000000000..2122742d76 --- /dev/null +++ b/database_talker/sample/sample.launch @@ -0,0 +1,89 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + topics: + - /store_image_description/result + + + + + + + + + + + + + + + + + + + + + + + + From be2c1ac003d2e6f6896266ff8cfb62b4f761ef99 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 29 Mar 2024 16:01:58 +0900 Subject: [PATCH 40/60] update rosinstall --- 
database_talker/rosinstall | 42 ++++++++++++++++++++++++++++++-------- 1 file changed, 33 insertions(+), 9 deletions(-) diff --git a/database_talker/rosinstall b/database_talker/rosinstall index 4963918407..ffc201df4e 100644 --- a/database_talker/rosinstall +++ b/database_talker/rosinstall @@ -1,12 +1,36 @@ +## +## rosdep install --from-path . --ignores-rc +## catkin build database_talker +## +# +# database_talker demos +# see https://github.com/jsk-ros-pkg/jsk_demos/pull/1388 - git: - local-name: jsk-ros-pkg/jsk_3rdparty - uri: https://github.com/k-okada/jsk_3rdparty.git - version: add_more_functions + local-name: jsk_demos + uri: https://github.com/sktometometo/jsk_demos.git + version: PR/hoge-py +# https://github.com/jsk-ros-pkg/jsk_robot/pull/1792 - git: - local-name: jsk-ros-pkg/jsk_recognition - uri: https://github.com/mqcmd196/jsk_recognition.git - version: PR/large-scale-vil + local-name: jsk_robot + uri: https://github.com/k-okada/jsk_robot.git + version: ichikura_sample +# wait until https://github.com/jsk-ros-pkg/jsk_3rdparty/pull/504 - git: - local-name: strands-project/mongodb_store - uri: https://github.com/k-okada/mongodb_store.git - version: patch-1 + local-name: jsk_3rdparty + uri: https://github.com/jsk-ros-pkg/jsk_3rdparty.git + version: master +# lovot driver +- git: + local-name: lovot_driver + uri: https://gitlab.jsk.imi.i.u-tokyo.ac.jp/ichikura/lovot.git + version: okada_ros_version +# aibo driver +- git: + local-name: aibo_driver + uri: https://gitlab.jsk.imi.i.u-tokyo.ac.jp/k-okada/aibo_status.git + version: driver +# openai_ros, with latest endpoint +- git: + local-name: openai_ros + uri: https://github.com/k-okada/openai_ros.git + version: use_ros From c361fd491a22f9a52358f62b350c0b900a0a628b Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 29 Mar 2024 16:14:23 +0900 Subject: [PATCH 41/60] install store_image_descripton and make_diary.py --- database_talker/CMakeLists.txt | 2 +- database_talker/sample/include/store_image_description.py 
| 0 database_talker/scripts/make_diary.py | 0 3 files changed, 1 insertion(+), 1 deletion(-) mode change 100755 => 100644 database_talker/sample/include/store_image_description.py mode change 100755 => 100644 database_talker/scripts/make_diary.py diff --git a/database_talker/CMakeLists.txt b/database_talker/CMakeLists.txt index 13c0f3340b..9d7c48311d 100644 --- a/database_talker/CMakeLists.txt +++ b/database_talker/CMakeLists.txt @@ -15,6 +15,6 @@ catkin_package( ) catkin_install_python(PROGRAMS - sample/include/store_image_description.py + sample/include/store_image_description.py scripts/make_diary.py DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION} ) diff --git a/database_talker/sample/include/store_image_description.py b/database_talker/sample/include/store_image_description.py old mode 100755 new mode 100644 diff --git a/database_talker/scripts/make_diary.py b/database_talker/scripts/make_diary.py old mode 100755 new mode 100644 From 627827226b29beea36d9eafa4e09dca9ad749d7c Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Wed, 3 Apr 2024 18:01:14 +0900 Subject: [PATCH 42/60] use USE_SYSTEM_PACKAGES FALSE to avoid bson error --- database_talker/CMakeLists.txt | 1 + database_talker/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/database_talker/CMakeLists.txt b/database_talker/CMakeLists.txt index 9d7c48311d..2fd8ff2bb2 100644 --- a/database_talker/CMakeLists.txt +++ b/database_talker/CMakeLists.txt @@ -9,6 +9,7 @@ catkin_python_setup() catkin_generate_virtualenv( PYTHON_INTERPRETER python3 CHECK_VENV FALSE + USE_SYSTEM_PACKAGES FALSE # Default TRUE ) catkin_package( diff --git a/database_talker/requirements.txt b/database_talker/requirements.txt index 1e0be444af..f9a3dfe469 100644 --- a/database_talker/requirements.txt +++ b/database_talker/requirements.txt @@ -1,2 +1,2 @@ -bson -pymongo +pymongo==3.10.1 +opencv-python==4.2.0.34 From bdea63b7b5d2775f58fb774465633d1f3dfedb67 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 
12 Apr 2024 16:19:31 +0900 Subject: [PATCH 43/60] add launch/aibo_diary.launch launch/include/database_talker.launch --- database_talker/launch/aibo_diary.launch | 10 +++ .../launch/include/database_talker.launch | 65 +++++++++++++++++++ 2 files changed, 75 insertions(+) create mode 100644 database_talker/launch/aibo_diary.launch create mode 100644 database_talker/launch/include/database_talker.launch diff --git a/database_talker/launch/aibo_diary.launch b/database_talker/launch/aibo_diary.launch new file mode 100644 index 0000000000..246cd1f8af --- /dev/null +++ b/database_talker/launch/aibo_diary.launch @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/database_talker/launch/include/database_talker.launch b/database_talker/launch/include/database_talker.launch new file mode 100644 index 0000000000..9913b6ef9e --- /dev/null +++ b/database_talker/launch/include/database_talker.launch @@ -0,0 +1,65 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + topics: + - /store_image_description/result + + + + + + + + + + + + + + + + + + + From d4fdd245047b5e6cfd89b7a247f14de8eb5a70b6 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Sun, 14 Apr 2024 17:17:13 +0900 Subject: [PATCH 44/60] make_aibo_diary.py: need to call super(MessageListner before super(), add missing imports --- database_talker/scripts/make_aibo_diary.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index c7164511c0..c2dc769436 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ b/database_talker/scripts/make_aibo_diary.py @@ -5,19 +5,23 @@ import argparse import logging import sys +import datetime +from dateutil import tz +JST = tz.gettz('Asia/Tokyo') from database_talker import DatabaseTalkerBase class MessageListener(DatabaseTalkerBase): def __init__(self, *args, **kwargs): + self.make_robot_activities_raw = self.make_aibo_activities_raw + 
super(MessageListener, self).__init__(*args, **kwargs) + # override query_type after super__.init() self.query_types = ['aibo_driver/StringStatus', 'aibo_driver/ObjectStatusArray', 'jsk_recognition_msgs/VQATaskActionResult'] - self.make_robot_activities_raw = self.make_aibo_activities_raw - super(MessageListener, self).__init__(*args, **kwargs) rospy.loginfo("all done, ready") From 80d3f1f14a966c5cd6f382617a8cda7f332e786d Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Sun, 14 Apr 2024 17:17:46 +0900 Subject: [PATCH 45/60] src/database_talker/__init__.py: add info mesage for types --- database_talker/src/database_talker/__init__.py | 1 + 1 file changed, 1 insertion(+) diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py index 1a607acc83..a2e67d084c 100644 --- a/database_talker/src/database_talker/__init__.py +++ b/database_talker/src/database_talker/__init__.py @@ -139,6 +139,7 @@ def query_multiple_types(self, types, meta_tuple): def query_mongo_data(self, types, start_time, end_time): "Query activities for aibo robot, returns list of tuple (msg, meta)" rospy.logwarn("Query activities from {} until {}".format(start_time, end_time)) + rospy.logwarn(" for types {}".format(types)) meta_query= {'published_at': {"$lt": end_time, "$gt": start_time}} meta_tuple = (StringPair(MongoQueryMsgRequest.JSON_QUERY, json.dumps(meta_query, default=json_util.default)),) mongo_msgs = self.query_multiple_types(types, meta_tuple) From e1af22eda86f40944579a57226fc7ba31501ebde Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 25 Apr 2024 11:07:15 +0900 Subject: [PATCH 46/60] database_talker/src/database_talker/__init__.py: cleanup debug message --- database_talker/src/database_talker/__init__.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py index a2e67d084c..2b7546bc48 100644 --- 
a/database_talker/src/database_talker/__init__.py +++ b/database_talker/src/database_talker/__init__.py @@ -856,9 +856,9 @@ def cb(self, msg): if msg._type == 'google_chat_ros/MessageEvent': text = msg.message.argument_text.lstrip() or msg.message.text.lstrip() space = msg.space.name - rospy.logwarn("Received chat message '{}'".format(text)) + rospy.logwarn("Received chat message '{}' on {}".format(text, datetime.datetime.now(JST).strftime('%Y-%m-%d %H:%M:%S'))) else: - rospy.logerr("Unknown message type {}".format(msg._type)) + rospy.logerr("Unknown message type {} on {}".format(msg._type, datetime.datetime.now(JST).strftime('%Y-%m-%d %H:%M:%S'))) return try: @@ -875,7 +875,7 @@ def cb(self, msg): # remove cache #### FIXME self.use_activities_cache = False except Exception as e: - rospy.logwarn("No date information included {}".format(e)) + rospy.logwarn("No date information included in '{}' ({})".format(text, e)) ret = self.make_diary(language) if 'filename' in ret: From 83da0d9b2eb23c3fc976aa96eadb15f914410ac2 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 25 Apr 2024 11:08:11 +0900 Subject: [PATCH 47/60] launch/include/database_talker.launch: add start_periodic_mongodb_trigger option --- database_talker/launch/include/database_talker.launch | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/database_talker/launch/include/database_talker.launch b/database_talker/launch/include/database_talker.launch index 9913b6ef9e..15ecfeec47 100644 --- a/database_talker/launch/include/database_talker.launch +++ b/database_talker/launch/include/database_talker.launch @@ -3,6 +3,7 @@ + @@ -22,7 +23,7 @@ - From 73d38f2360dddbf9d736fda48121b85d6c39927c Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 25 Apr 2024 11:08:57 +0900 Subject: [PATCH 48/60] sample/include/store_image_description.py write to /tmp/image.jpg, if vqa failed --- .../sample/include/store_image_description.py | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git 
a/database_talker/sample/include/store_image_description.py b/database_talker/sample/include/store_image_description.py index 8f31bc072f..7278a944ab 100644 --- a/database_talker/sample/include/store_image_description.py +++ b/database_talker/sample/include/store_image_description.py @@ -102,10 +102,15 @@ def cb(msg): if len(images) > 10: images = images[1:] - answer = vqa(question, [msg.data], temperature = 1.0) - result_pub.publish(VQATaskActionResult(header=Header(stamp=rospy.Time.now()), - result=VQATaskResult(result=VQAResult(result=[QuestionAndAnswerText(question=question, answer=answer)]), done=True))) - image_pub.publish(msg) + try: + answer = vqa(question, [msg.data], temperature = 1.0) + result_pub.publish(VQATaskActionResult(header=Header(stamp=rospy.Time.now()), + result=VQATaskResult(result=VQAResult(result=[QuestionAndAnswerText(question=question, answer=answer)]), done=True))) + image_pub.publish(msg) + except Exception as e: + filename = '/tmp/image.jpg' + rospy.logerr("write current image to {}, due to {}".format(filename, e)) + cv2.imwrite(filename, cv2.imdecode(np.fromstring(msg.data, np.uint8), cv2.IMREAD_COLOR)) return if __name__ == '__main__': From fd03657a9ac14f398b119e8b20bd49b25ae1d0f6 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 25 Apr 2024 11:09:25 +0900 Subject: [PATCH 49/60] sample/sample.launch, fix location of resize.launch --- database_talker/sample/sample.launch | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/database_talker/sample/sample.launch b/database_talker/sample/sample.launch index 2122742d76..021cda29a0 100644 --- a/database_talker/sample/sample.launch +++ b/database_talker/sample/sample.launch @@ -83,7 +83,7 @@ - + From 124a7ae9ddc21a75686157405fb2580393d494d0 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 25 Apr 2024 11:10:40 +0900 Subject: [PATCH 50/60] launch/aibo_diary.launch: start usb_cam, and log aibo data --- database_talker/launch/aibo_diary.launch | 42 ++++++++++++++++++++++++ 1 file 
changed, 42 insertions(+) diff --git a/database_talker/launch/aibo_diary.launch b/database_talker/launch/aibo_diary.launch index 246cd1f8af..f3c5f0743c 100644 --- a/database_talker/launch/aibo_diary.launch +++ b/database_talker/launch/aibo_diary.launch @@ -1,10 +1,52 @@ + + + + + + + + + + + + + + + + + + + + + + + + + topics: + - /aibo_driver/biting_status + - /aibo_driver/body_touched_status + - /aibo_driver/found_objects_status + - /aibo_driver/hungry_status + - /aibo_driver/name_called_status + - /aibo_driver/paw_pads_status + - /aibo_driver/posture_status + - /aibo_driver/sleepy_status + - /aibo_driver/voice_command_status + + From f9bb1cdbd51d45bf67ec4afd5ae2590875aa4baf Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 26 Apr 2024 10:10:32 +0900 Subject: [PATCH 51/60] DatabaseTalkerBase.make_response: write all result images --- database_talker/src/database_talker/__init__.py | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py index 2b7546bc48..f25933b987 100644 --- a/database_talker/src/database_talker/__init__.py +++ b/database_talker/src/database_talker/__init__.py @@ -553,9 +553,14 @@ def make_response(self, text, language="Japanese"): if len(results) > 0: if not IsHeadless: # debug try: - cv2.imshow('images for response', cv2.hconcat([cv2.imdecode(np.fromstring(result['image'].data, np.uint8), cv2.IMREAD_COLOR) for result in results])) + concat_images = cv2.hconcat([cv2.imdecode(np.fromstring(result['image'].data, np.uint8), cv2.IMREAD_COLOR) for result in results]) + filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) + cv2.imwrite(filename, concat_images) + rospy.logwarn("save all images to {}".format(filename)) + cv2.imshow('images for response', concat_images) cv2.waitKey(100) - except: + except Exception as e: + rospy.logerr(e) pass # pubish as card filename = tempfile.mktemp(suffix=".jpg", 
dir=rospkg.get_ros_home()) From 3afe66ebabf2a846ef434befd1ad6b68cb943b27 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 26 Apr 2024 10:22:13 +0900 Subject: [PATCH 52/60] DatabaseTalkerBase.make_response: select most closest images from timestamp of VQA results --- database_talker/src/database_talker/__init__.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py index f25933b987..71304f90c3 100644 --- a/database_talker/src/database_talker/__init__.py +++ b/database_talker/src/database_talker/__init__.py @@ -516,7 +516,7 @@ def make_response(self, text, language="Japanese"): if no >= 0 and no < len(image_activities): image_activity = list(image_activities.items())[no] answer, timestamp = image_activity - rospy.loginfo("Choose {} : {} as corresponging memory".format(no, answer)) + rospy.loginfo("Choose {} : {} as corresponging memory ({})".format(no, answer, timestamp)) # create response @@ -562,7 +562,10 @@ def make_response(self, text, language="Japanese"): except Exception as e: rospy.logerr(e) pass + # select closest image for response # pubish as card + if timestamp: + results.sort(key=lambda x: abs((x['timestamp'] - timestamp).total_seconds())) filename = tempfile.mktemp(suffix=".jpg", dir=rospkg.get_ros_home()) self.write_image_with_annotation(filename, results[0], "") return {'text': response, 'filename': filename} From c67cdc1f645ffb83f28fd474aa08dea19a0346eb Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 26 Apr 2024 10:24:50 +0900 Subject: [PATCH 53/60] make_aibo_dairy: support --test-response --- database_talker/scripts/make_aibo_diary.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/database_talker/scripts/make_aibo_diary.py b/database_talker/scripts/make_aibo_diary.py index c2dc769436..c397e32e88 100755 --- a/database_talker/scripts/make_aibo_diary.py +++ 
b/database_talker/scripts/make_aibo_diary.py @@ -145,7 +145,8 @@ def make_diary(self, language="Japanese"): if __name__ == '__main__': parser = argparse.ArgumentParser() - parser.add_argument('--test', action='store_true') + parser.add_argument('--test-diary', '--test', action='store_true') + parser.add_argument('--test-response', type=str, default=None) parser.add_argument('--prompt-type', default='basic', choices=['basic','personality']) args = parser.parse_args(rospy.myargv()[1:]) @@ -155,10 +156,15 @@ def make_diary(self, language="Japanese"): logger = logging.getLogger('rosout') logger.setLevel(rospy.impl.rosout._rospy_to_logging_levels[rospy.DEBUG]) - ml = MessageListener(wait_for_chat_server=not args.test, prompt_type=args.prompt_type) - if args.test: + ml = MessageListener(wait_for_chat_server=not (args.test_diary or args.test_response), prompt_type=args.prompt_type) + if args.test_diary: ret = ml.make_diary() if 'filename' in ret: rospy.loginfo("image is saved at {}".format(ret['filename'])) sys.exit(0) + elif args.test_response: + ret = ml.make_response(args.test_response) + if 'filename' in ret: + rospy.loginfo("image is saved at {}".format(ret['filename'])) + sys.exit(0) rospy.spin() From d832ee5fbdf71150313f6f5a1e5ba56268c186c9 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 26 Apr 2024 18:41:06 +0900 Subject: [PATCH 54/60] database_talker/__init__.py: add ~message Action Server to check if sending chat is failed --- .../src/database_talker/__init__.py | 31 ++++++++++++++++--- 1 file changed, 27 insertions(+), 4 deletions(-) diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py index 71304f90c3..f45edec461 100644 --- a/database_talker/src/database_talker/__init__.py +++ b/database_talker/src/database_talker/__init__.py @@ -44,7 +44,7 @@ from mongodb_store.util import deserialise_message from google_chat_ros.msg import Card, Section, WidgetMarkup, Image -from google_chat_ros.msg import 
MessageEvent, SendMessageAction, SendMessageGoal +from google_chat_ros.msg import MessageEvent, SendMessageAction, SendMessageGoal, SendMessageResult from mongodb_store_msgs.msg import StringPairList, StringPair from mongodb_store_msgs.srv import MongoQueryMsg, MongoQueryMsgRequest, MongoQueryMsgResponse @@ -118,6 +118,8 @@ def __init__(self, start_date=datetime.date.today(), wait_for_chat_server=True, rospy.loginfo("subscribe '/google_chat_ros/message_activity'") self.sub = rospy.Subscriber('/google_chat_ros/message_activity', MessageEvent, self.cb) + self.sas = actionlib.SimpleActionServer('~message', SendMessageAction, self.action_cb, auto_start=False) + self.sas.start() rospy.loginfo("all done, ready") @@ -831,7 +833,12 @@ def publish_google_chat_card(self, text, space, filename=None): goal.cards = [Card(sections=[Section(widgets=[WidgetMarkup(image=Image(localpath=filename))])])] goal.space = space rospy.logwarn("send {} to {}".format(goal.text, goal.space)) - self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10)) + ret = self.chat_ros_ac.send_goal_and_wait(goal, execute_timeout=rospy.Duration(0.10)) + result = self.chat_ros_ac.get_result() + if not result.done: + rospy.logerr("publish_google_chat_card: failed to send message, send_goal_and_wait({}), result.done({})".format(ret, result.done)) + return False + return True def text_to_salience(self, text): goal = AnalyzeTextGoal() @@ -860,6 +867,7 @@ def translate(self, text, dest): def cb(self, msg): + ac_ret = False space = 'spaces/AAAAoTwLBL0' ## default space JskRobotBot if msg._type == 'google_chat_ros/MessageEvent': text = msg.message.argument_text.lstrip() or msg.message.text.lstrip() @@ -867,7 +875,7 @@ def cb(self, msg): rospy.logwarn("Received chat message '{}' on {}".format(text, datetime.datetime.now(JST).strftime('%Y-%m-%d %H:%M:%S'))) else: rospy.logerr("Unknown message type {} on {}".format(msg._type, datetime.datetime.now(JST).strftime('%Y-%m-%d %H:%M:%S'))) - return + 
return False try: language = 'English' if is_ascii(text) else 'Japanese' @@ -906,4 +914,19 @@ def cb(self, msg): except Exception as e: rospy.logerr("Callback failed {} {}".format(e, traceback.format_exc())) self.publish_google_chat_card("💀 {}".format(e), space) - + return True + + + def action_cb(self, goal): + msg = MessageEvent() + msg.message.text = goal.text + msg.message.argument_text = goal.text + msg.space.name = goal.space + self.cb(msg) + ret = self.chat_ros_ac.wait_for_result(rospy.Duration(5.0)) + result = self.chat_ros_ac.get_result() + rospy.logwarn("action_cb: set_succeeded, wait_for_result({}), result.done({})".format(ret, result.done)) + if ret and result.done: + self.sas.set_succeeded(SendMessageResult(done=True)) + else: + self.sas.set_aborted() From 8c058f40295626080bd60357e82a2ab35bb4b8e7 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 26 Apr 2024 18:42:54 +0900 Subject: [PATCH 55/60] sample/include/store_image_description.py: check if image is black/blurred, also check similarity with past 10 sentences --- .../sample/include/store_image_description.py | 27 ++++++++++++++++--- 1 file changed, 23 insertions(+), 4 deletions(-) diff --git a/database_talker/sample/include/store_image_description.py b/database_talker/sample/include/store_image_description.py index 7278a944ab..e10a6c936f 100644 --- a/database_talker/sample/include/store_image_description.py +++ b/database_talker/sample/include/store_image_description.py @@ -27,6 +27,7 @@ image_pub = None chat_completion = None images = [np.array(cv2.imencode('.jpg', np.zeros((120,160,3), np.uint8))[1]).tostring()] +answers = [] def vqa(question, images, temperature = 0.0, max_tokens = 300, debug = False): global chat_completion @@ -72,9 +73,8 @@ def debug_cb(data): mongodb_event_sub = rospy.Subscriber('/publish_trigger_mongodb_event', rospy.AnyMsg, debug_cb, queue_size=1) def cb(msg): - rospy.logerr("debug cb") global chat_completion - global images + global images, answers global result_pub, 
image_pub global debug_msg @@ -82,7 +82,7 @@ def cb(msg): if len(images) == 0: images.extend([small_msg_data]) - questions = rospy.get_param('~questions', ['Provide a brief caption under 140 characters for this image, focusing on the most striking aspect and overall atmosphere.']) + questions = rospy.get_param('~questions', ['Provide a brief caption under 140 characters for this image, focusing on the most striking aspect and overall atmosphere. If the images is black, blurred, disturbed or shows meaningless objects, answer "NO"']) question = ' '.join(questions) if type(questions) == list else questions if (not IsHeadless): @@ -97,13 +97,32 @@ def cb(msg): [small_msg_data] + images, temperature = 1.0, debug=True) use_this_image = 'YES' in use_this_image_answer[:10] ''' - if abs((rospy.Time.now() - debug_msg.header.stamp).to_sec()) < 5 and debug_msg.data == 'debug': + elapsed_from_trigger = abs((rospy.Time.now() - debug_msg.header.stamp).to_sec()) + rospy.loginfo("received images, {} sec after trigger event".format(elapsed_from_trigger)) + if elapsed_from_trigger < 5 and debug_msg.data == 'debug': images.extend([small_msg_data]) if len(images) > 10: images = images[1:] try: answer = vqa(question, [msg.data], temperature = 1.0) + if answer == 'NO': + raise Exception('Invalid image') + rospy.loginfo("- {}".format(answer)) + for a in answers: + rospy.loginfo(" .. 
{}".format(a)) + req = ChatCompletionsRequest(model="gpt-3.5-turbo", + messages = json.dumps([{"role": "system", "content": "You can compare whether your sentenses describe the same scene and returns with 'YES' or 'NO'"}, + {"role": "user", "content": "Return 'YES' if given text '{}' is similar to one of the following list '{}', otherwise return 'NO'".format(answer, answers)} + ])) + rospy.loginfo("Q: {}".format(req.messages[0:255])) + ret = chat_completion(req) + rospy.loginfo("A: {}".format(ret.content)) + if ret.content == 'YES': + raise Exception('Duplicates image') + answers.extend([answer]) + if len(answers) > 5: + answers = answers[1:] result_pub.publish(VQATaskActionResult(header=Header(stamp=rospy.Time.now()), result=VQATaskResult(result=VQAResult(result=[QuestionAndAnswerText(question=question, answer=answer)]), done=True))) image_pub.publish(msg) From 1883a38f55388c01a4a1463303f49e53ee587cd7 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Fri, 26 Apr 2024 18:47:06 +0900 Subject: [PATCH 56/60] launch/include/database_talker.launch : set respawn true to thorottled_color_logger --- database_talker/launch/include/database_talker.launch | 1 + 1 file changed, 1 insertion(+) diff --git a/database_talker/launch/include/database_talker.launch b/database_talker/launch/include/database_talker.launch index 15ecfeec47..72872906d5 100644 --- a/database_talker/launch/include/database_talker.launch +++ b/database_talker/launch/include/database_talker.launch @@ -21,6 +21,7 @@ + Date: Fri, 14 Jun 2024 17:48:06 +0900 Subject: [PATCH 57/60] launch/include/database_talker.launch: add start_mongodb_record_nodelet_manager, start_mongodb --- database_talker/launch/include/database_talker.launch | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/database_talker/launch/include/database_talker.launch b/database_talker/launch/include/database_talker.launch index 72872906d5..b393429619 100644 --- a/database_talker/launch/include/database_talker.launch +++ 
b/database_talker/launch/include/database_talker.launch @@ -5,6 +5,9 @@ + + + @@ -15,6 +18,8 @@ + + From bdc2189e4265705ca261e4dd0bf1374cdc5f89e0 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 1 Aug 2024 12:42:34 +0900 Subject: [PATCH 58/60] src/database_talker/__init__.py : check is pickle activities start from self.start_date --- database_talker/src/database_talker/__init__.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/database_talker/src/database_talker/__init__.py b/database_talker/src/database_talker/__init__.py index f45edec461..5ad826ca4b 100644 --- a/database_talker/src/database_talker/__init__.py +++ b/database_talker/src/database_talker/__init__.py @@ -163,7 +163,17 @@ def query_mongo_data_days(self, types=None, days=7): (datetime.datetime.today() - datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file))).seconds < 1 * 60 * 60): # seconds -> hours rospy.loginfo('Loading cached activities data {}'.format(datetime.datetime.fromtimestamp(os.path.getmtime(self.pickle_file)))) with open(self.pickle_file, 'rb') as f: - return pickle.load(f) + activities = pickle.load(f) + # check if activitis start from self.start_date + if len(activities) > 0 and len(activities[0]) > 0 and \ + len(activities[0][0]) > 0 and activities[0][0][1].get('timestamp') : + timestamp = datetime.datetime.fromtimestamp(activities[0][0][1]['timestamp']//1000000000, JST) + rospy.loginfo(' ... cached data is starting from {}'.format(timestamp)) + if abs((timestamp - datetime.datetime.combine(self.start_date, datetime.datetime.min.time(), tzinfo=JST)).total_seconds()) < 86400 : # 24 hours + rospy.loginfo(' ... using cached activities for {}'.format(self.start_date)) + return activities + else: + rospy.logwarn("Cached file({}) is different from start_date({}), loading from mongoDB".format(timestamp, self.start_date)) activities = [] today = self.start_date ## for debug ... 
-> - datetime.timedelta(hours=24) @@ -877,6 +887,8 @@ def cb(self, msg): rospy.logerr("Unknown message type {} on {}".format(msg._type, datetime.datetime.now(JST).strftime('%Y-%m-%d %H:%M:%S'))) return False + # when callbacked, update start_date to today + self.start_date=datetime.date.today() try: language = 'English' if is_ascii(text) else 'Japanese' if any(x in text for x in ['diary', '日記']): From e5481d71031bbaf06b73c2bcebb9e34b2a20cf88 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 1 Aug 2024 12:44:15 +0900 Subject: [PATCH 59/60] scripts/make_lovot_diary.py : lovot need to use yesterday's data, because data retrieval from Lovot is only supported on daily basis, so we must replacate them on the next day --- database_talker/scripts/make_lovot_diary.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/database_talker/scripts/make_lovot_diary.py b/database_talker/scripts/make_lovot_diary.py index 2be054d887..81201d885e 100755 --- a/database_talker/scripts/make_lovot_diary.py +++ b/database_talker/scripts/make_lovot_diary.py @@ -76,6 +76,11 @@ def make_lovot_activities_raw(self, mongo_data_days = None): ## return diary_activities_raw ## (timestamp, event) + def make_diary(self, *args, **kwargs): + # lovot need to use yesterday's data, because data retrieval from Lovot is only supported on daily basis, so we must replacate them on the next day + self.start_date = self.start_date - datetime.timedelta(days=1) + return super(LovotDatabaseTalker, self).make_diary(*args, **kwargs) + if __name__ == '__main__': parser = argparse.ArgumentParser() @@ -100,7 +105,7 @@ def make_lovot_activities_raw(self, mongo_data_days = None): rospy.logerr("Invalid date format") sys.exit(1) - ml = LovotDatabaseTalker(start_date=start_date, wait_for_chat_server=not args.test, use_activities_cache=not args.test, prompt_type=args.prompt_type) + ml = LovotDatabaseTalker(start_date=start_date, wait_for_chat_server=not args.test, prompt_type=args.prompt_type) if 
args.test: ret = ml.make_diary() if 'filename' in ret: From e57ac5669e706e2db1f676a5bae59c7d41dcbe47 Mon Sep 17 00:00:00 2001 From: Kei Okada Date: Thu, 1 Aug 2024 12:44:48 +0900 Subject: [PATCH 60/60] add launch/lovot_diary.launch --- database_talker/launch/lovot_diary.launch | 37 +++++++++++++++++++++++ 1 file changed, 37 insertions(+) create mode 100644 database_talker/launch/lovot_diary.launch diff --git a/database_talker/launch/lovot_diary.launch b/database_talker/launch/lovot_diary.launch new file mode 100644 index 0000000000..98a75127b0 --- /dev/null +++ b/database_talker/launch/lovot_diary.launch @@ -0,0 +1,37 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +