Commit 1bfa53e7 authored by Aleš Horák

update

parent 6c8f497f
# Come with me Dialog
Developed at [NLP Centre](https://nlp.fi.muni.cz/en), [FI MU](https://www.fi.muni.cz/index.html.en) for [Karel Pepper](https://nlp.fi.muni.cz/trac/pepper)
This is a dialog application, in both Czech and English, that enables the robot to walk with a person hand in hand or to follow (track) a person on request.
Start with "Pepper, come with me." / "Karle, pojď se mnou."
## Installation
* [make and install](https://nlp.fi.muni.cz/trac/pepper/wiki/InstallPkg) the package as usual for the Pepper robot
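
Once installed, the dialog rules call the bound `ALMotionComing` service (see `scripts/dialog_coming_service.py`), so besides the voice commands the service can also be exercised directly over NAOqi for testing. A minimal sketch using the `qi` Python SDK; the robot address below is only a placeholder, and the service itself is started automatically by the package manifest:

```python
import qi

# Connect to the robot running the dialog_coming package (address is an example).
session = qi.Session()
session.connect("tcp://pepper.local:9559")

coming = session.service("ALMotionComing")
coming.comeWithUser()        # blocks while the user guides the robot by the hand
coming.trackUser()           # start following the currently focused user
print(coming.isTracking())   # True while the tracker is running
coming.stopTrackingUser()
```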
<?xml version="1.0" encoding="UTF-8" ?>
<Package name="dialog_coming" format_version="4">
    <Manifest src="manifest.xml" />
    <BehaviorDescriptions/>
    <Dialogs>
        <Dialog name="dlg_coming" src="dlg_coming/dlg_coming.dlg" />
    </Dialogs>
    <Resources>
        <File name="icon" src="icon.png" />
        <File name="dialog_coming_service" src="scripts/dialog_coming_service.py" />
        <File name="__init__" src="scripts/stk/__init__.py" />
        <File name="events" src="scripts/stk/events.py" />
        <File name="logging" src="scripts/stk/logging.py" />
        <File name="runner" src="scripts/stk/runner.py" />
        <File name="services" src="scripts/stk/services.py" />
    </Resources>
    <Topics>
        <Topic name="dlg_coming_czc" src="dlg_coming/dlg_coming_czc.top" topicName="dlg_coming" language="cs_CZ" />
        <Topic name="dlg_coming_enu" src="dlg_coming/dlg_coming_enu.top" topicName="dlg_coming" language="en_US" />
    </Topics>
    <IgnoredPaths>
        <Path src=".metadata" />
    </IgnoredPaths>
</Package>
multilanguage:dlg_coming
enu:dlg_coming_enu.top
czc:dlg_coming_czc.top
topic: ~dlg_coming()
language: czc
concept:(call_robot_name) [Karle Pepře]
u:(e:FrontTactilTouched)
\pau=100\
u:(e:RearTactilTouched)
\pau=100\
u:(~call_robot_name [pojď poď] se mnou) Vem mě za ruku ^sCall(ALMotionComing.comeWithUser())
u:(~call_robot_name ["[pojď poď] ke mně" "sleduj mě" "následuj mě"]) Okej ^sCall(ALMotionComing.trackUser())
u:^private({[ještě pořád]} mě {[ještě pořád]} [sleduješ následuješ]) ^call(ALMotionComing.isTracking())
c1:(true) Ano
c1:(_*) Ne
u:^private([stop dost stačí]) ^call(ALMotionComing.isTracking())
c1:(true) Jasně ^sCall(ALMotionComing.stopTrackingUser())
c1:(_*) Okej
topic: ~dlg_coming()
language: enu
concept:(call_robot_name) [Karel Pepper]
u:(e:FrontTactilTouched)
\pau=100\
u:(e:RearTactilTouched)
\pau=100\
u:(~call_robot_name come with me) Take my hand ^sCall(ALMotionComing.comeWithUser())
u:(~call_robot_name ["come to me" "follow me" "track me"]) Okay ^sCall(ALMotionComing.trackUser())
u:^private(are you {still} [following tracking] me) ^call(ALMotionComing.isTracking())
c1:(true) Yes
c1:(_*) No
u:^private([stop enough]) ^call(ALMotionComing.isTracking())
c1:(true) No problem ^sCall(ALMotionComing.stopTrackingUser())
c1:(_*) Okay
icon.png (binary image, 28 KB)
<package uuid="dialog_coming" version="0.1.92">
    <names>
        <name lang="en_US">dialog_coming</name>
        <name lang="cs_CZ">dialog_coming</name>
    </names>
    <descriptions>
        <description lang="en_US">Come with me dialog</description>
        <description lang="cs_CZ">Come with me dialog</description>
    </descriptions>
    <supportedLanguages>
        <language>en_US</language>
        <language>cs_CZ</language>
    </supportedLanguages>
    <descriptionLanguages>
        <language>en_US</language>
    </descriptionLanguages>
    <contents>
        <behaviorContent />
        <dialogContent topicName="dlg_coming" typeVersion="1.0">
            <topic language="cs_CZ" path="dlg_coming/dlg_coming_czc.top" />
            <topic language="en_US" path="dlg_coming/dlg_coming_enu.top" />
        </dialogContent>
    </contents>
    <requirements>
        <naoqiRequirement minVersion="2.3" />
        <robotRequirement model="JULIETTE" />
    </requirements>
    <services>
        <service autorun="true" execStart="/usr/bin/python2 scripts/dialog_coming_service.py" name="ALMotionComing" />
    </services>
</package>
"""
A sample showing how to have a NAOqi service as a Python app.
"""
__version__ = "0.0.3"
__copyright__ = "Copyright 2015, Aldebaran Robotics"
__author__ = 'ekroeger'
__email__ = 'ekroeger@aldebaran.com'
import qi
import json
import time
import sys
import math
import motion
import os
import stk.runner
import stk.events
import stk.services
import stk.logging
PREFER_FORWARD_MOVE = True
TIMEOUT_TO_STOP = 30
TIME_TO_MAKE_BEEP = 3
TIME_TO_RESET_MOVE_FAILED = 10
TRIES_TO_STOP_TRACKING_HUMAN_LOST = 3
TRIES_TO_STOP_TRACKING_MOVE_FAILED = 5
MAKE_OOPS_SOUND_FILE = "/var/tmp/make_oops_sound.txt"
class ALMotionComing(object):
    "NAOqi service that lets Pepper be guided by the hand and follow a tracked user."
    APP_ID = "com.aldebaran.ALMotionComing"
    def __init__(self, qiapp):
        # generic activity boilerplate
        self.qiapp = qiapp
        self.events = stk.events.EventHelper(qiapp.session)
        self.s = stk.services.ServiceCache(qiapp.session)
        self.logger = stk.logging.get_logger(qiapp.session, self.APP_ID)
        # Internal variables
        # This script does not work for armVar="R", because joint angles are flipped.
        # more here: http://doc.aldebaran.com/2-0/family/juliette_technical/joints_juliette.html
        self.armVar = "L"
        self.armName = self.armVar + "Arm"
        self.offerArmPose = {
            'ShoulderPitch': 1.26,
            'ShoulderRoll': 0.33,
            'ElbowYaw': -1.58,
            'ElbowRoll': -1.11,
            'WristYaw': -0.71,
            'Hand': 0.54}
        self.offerArmPosition = []
        self.initTrackingMode = None
        self.targetXMem = 0
        self.minReactionTrigger = {
            'targetX': 0.05,      # minimal absolute speed value to which the robot reacts
            'targetTheta': 0.05,  # minimal absolute turn value to which the robot reacts
        }
        self.maxX = 1
        self.minX = -0.5  # only half of the speed while reversing
        self.delayEndTime = 0.1  # how long the robot keeps reacting without the sensor being touched (seconds)
        self.jointNames = []
        self.is_following = False
        self.is_moving = False
        self.offerArmPoseAngles = None
        self.move_counter = 0
        self.time_action = -1
        self.is_tracking = False
        self.saved_tracking_mode = None
        self.saved_engagement_mode = None
        self.subscribers_list = []
        self.user = None
        self.tracked_user = None
        self.on_human_tracked_time = None
        self.on_human_tracked_time_beep = None
        self.on_human_tracked_tries = None
        self.on_move_failed_time = None
        self.on_move_failed_time_beep = None
        self.on_move_failed_tries = None
    @qi.nobind
    def offerArm(self, showPos=False):
        if self.offerArmPoseAngles is None:
            self.offerArmPoseAngles = [self.offerArmPose[x[1:]] for x in self.jointNames]
            if showPos:
                self.logger.info("offerArm: {}".format(', '.join([x[0]+': '+str(x[1]) for x in zip(self.jointNames, self.offerArmPoseAngles)])))
        if self.s.ALMotion:
            self.s.ALMotion.setAngles(self.jointNames, self.offerArmPoseAngles, 0.3)
        else:
            self.logger.warning("ALMotion not found")
    @qi.nobind
    def loosenArm(self, loosenOn):
        if self.s.ALMotion:
            if loosenOn:
                self.logger.info("Loosening arm")
                # [ShoulderPitch, ShoulderRoll, ElbowYaw, ElbowRoll, WristYaw, Hand]
                self.s.ALMotion.stiffnessInterpolation(self.armName, [0.02, 0.02, 0.02, 0.02, 0.02, 0.02], 0.5)
            else:
                stiff = self.s.ALMotion.getStiffnesses(self.armName)
                if stiff[0] < 1.0:
                    self.logger.info("Resetting stiffnesses of {}".format(self.armName))
                    self.s.ALMotion.stiffnessInterpolation(self.armName, 1.0, 1.5)
                self.offerArm(stiff[0] < 1.0)
        else:
            self.logger.warning("ALMotion not found")
    @qi.nobind
    def initRobotPosition(self):
        """
        Inits the robot's position and stiffnesses to make the guiding possible.
        """
        inited = False
        try:
            self.logger.info("Starting...")
            self.s.ALMotion.wakeUp()
            #self.s.ALAutonomousLife.stopAll()
            #self.s.ALAutonomousLife.setState("interactive")
            self.s.ALAutonomousLife.setAutonomousAbilityEnabled("All", False)
            #self.s.ALAutonomousLife.stopMonitoringLaunchpadConditions()
            self.initTrackingMode = self.s.ALBasicAwareness.getTrackingMode()
            self.s.ALBasicAwareness.pauseAwareness()
            #self.s.ALBasicAwareness.setTrackingMode('Head')
            self.s.ALBackgroundMovement.setEnabled(False)
            self.s.ALRobotPosture.goToPosture("StandInit", 0.3)
            self.s.ALMotion.moveInit()
            time.sleep(1.0)
            self.s.ALMotion.setStiffnesses(self.armName, 1.0)
            # [Yaw, Pitch]
            self.s.ALMotion.setAngles("Head", [1, -0.44], 0.3)
            self.offerArm(True)
            # Disable left arm moves while moving
            self.s.ALMotion.setMoveArmsEnabled(False, True)
            time.sleep(1.0)
            self.move_counter = 0
            self.time_action = time.time()
            inited = True
        except AttributeError as e:
            self.logger.warning("Service not found: {}".format(e))
        return (inited)
    @qi.nobind
    def translate_diff_to_move(self, arm_pos, arm_pos_diff):
        move = []
        move_source = []
        targetX = 0.0
        targetY = 0.0
        targetTheta = 0.0
        targetX = (-arm_pos_diff[0]) + arm_pos_diff[3]  # -ShoulderPitch + ElbowRoll
        if (targetX <= 0):
            # if moving backwards:
            # subtract ElbowYaw from theta, because ShoulderRoll is more locked in position
            # thus: turn = ShoulderRoll - ElbowYaw
            targetTheta = (arm_pos_diff[1] - arm_pos_diff[2])
        else:
            # if moving forward:
            # cannot subtract ElbowYaw from theta, because ElbowYaw is influenced by ShoulderRoll
            # thus: turn = ShoulderRoll
            targetTheta = arm_pos_diff[1]
        if (targetTheta < 0):
            # if turning right:
            # in the starting position the hand is naturally closer to the body; therefore, turning right needs to be more sensitive
            targetTheta = targetTheta * 2
        if self.minReactionTrigger['targetX'] > targetX > -self.minReactionTrigger['targetX']:
            targetX = 0.0
        else:
            targetX = max(-1, min(1, targetX))
        if self.minReactionTrigger['targetTheta'] > targetTheta > -self.minReactionTrigger['targetTheta']:
            targetTheta = 0.0
        else:
            targetTheta = max(-1, min(1, targetTheta))
        # if turning, then slow targetX down
        targetX -= math.copysign(min(abs(targetTheta), abs(targetX)), targetX) * 0.6
        # if it is a sharp turn, then slow it down
        MaxVelTheta = 1 - (targetTheta**2 * 0.4)
        # if faster, then boost
        MaxVelXY = max(0.30, min(targetX/2 + 0.05, 0.55))  # turbo
        # if the hand is pulled back quickly, then BRAKE
        if (self.targetXMem > 0 and targetX - self.targetXMem < -0.3):
            self.targetXMem = targetX
            return (0, 0, 0)
        self.targetXMem = targetX
        return (targetX, targetY, targetTheta, [['MaxVelXY', MaxVelXY], ['MaxVelTheta', MaxVelTheta]])  # medium speed
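    # Worked example (illustrative numbers, not measured on a robot): with
    # arm_pos_diff = [-0.30, 0.00, 0.00, 0.10, 0.00, 0.00], i.e. ShoulderPitch
    # 0.30 rad lower than in the offer pose (hand pulled forward/up) and
    # ElbowRoll opened by 0.10 rad, targetX = 0.30 + 0.10 = 0.40 > 0, so
    # targetTheta = ShoulderRoll diff = 0.00. Both values pass the 0.05
    # minReactionTrigger checks unchanged, the turn correction subtracts
    # nothing, and the method returns roughly
    # (0.4, 0, 0, [['MaxVelXY', 0.30], ['MaxVelTheta', 1.0]]).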
    @qi.nobind
    def interpretJointsPose(self):
        """
        Translates the current left arm pose into a target position
        """
        # Retrieve the current arm position.
        armPose = self.s.ALMotion.getAngles(self.jointNames, True)
        arm_pos_diff = [x - self.offerArmPoseAngles[i] for i, x in enumerate(armPose)]
        # Return the corresponding pose.
        return self.translate_diff_to_move(armPose, arm_pos_diff)
    @qi.nobind
    def moveToTargetPose(self, targetPose):
        """Move toward the desired target."""
        #self.logger.info("{}. moveToward{}".format(self.move_counter, targetPose))
        self.s.ALMotion.moveToward(*targetPose)
        self.move_counter += 1
    @qi.nobind
    def time_active(self):
        self.time_action = time.time()
    @qi.nobind
    def time_idle(self):
        """Compute the idle time since the last activity (seconds)."""
        if self.time_action < 0:
            return (0)
        return (time.time() - self.time_action)
    @qi.bind(returnType=qi.Void, paramsType=[])
    def comeWithUser(self):
        """
        Guides Pepper by the hand: the user takes the robot's left hand and leads it;
        touching the rear tactile sensor (or a timeout) stops the guiding.
        """
        try:
            self.is_following = True
            self.jointNames = self.s.ALMotion.getBodyNames(self.armName)
            # Init robot position.
            self.initRobotPosition()
            # Wait for the user to take the robot's left hand.
            self.logger.info("waiting for HandLeftBackTouched")
            try:
                self.s.ALAudioPlayer.playSoundSetFile("enu_ono_ba_human_detected_06")
            except:
                pass
            while not self.s.ALMemory.getData("HandLeftBackTouched") and (self.time_idle() < TIMEOUT_TO_STOP):
                self.offerArm()
            if (self.time_idle() >= TIMEOUT_TO_STOP):
                return
            self.is_moving = False
            # Set LEDs to blue.
            self.s.ALLeds.fadeRGB("FaceLeds", 255, 0.1)
            try:
                self.s.ALAudioPlayer.playSoundSetFile("enu_word_ok_01")
            except:
                pass
            self.time_active()
            isMoving = False
            handTouched = True
            while not self.s.ALMemory.getData("RearTactilTouched") and (self.time_idle() < TIMEOUT_TO_STOP):
                #self.s.ALBasicAwareness.setTrackingMode('Head')
                self.s.ALBasicAwareness.pauseAwareness()
                if (not (time.time() - self.time_action < self.delayEndTime * 0.5)):
                    armPose = self.s.ALMotion.getAngles(self.jointNames, True)
                    if ((armPose[0] + math.pi + armPose[3] - armPose[1]) < 4 and handTouched):
                        self.time_active()
                    elif (self.s.ALMemory.getData("HandLeftBackTouched")):
                        handTouched = True
                        self.time_active()
                    else:
                        handTouched = False
                if (time.time() - self.time_action < self.delayEndTime):
                    if not isMoving:
                        self.loosenArm(True)
                        self.s.ALMotion.setAngles("Head", [1, -0.44], 0.3)
                        # needs ENABLE_DEACTIVATION_OF_FALL_MANAGER in /home/naoqi/.config/naoqi/ALMotion.xml
                        self.s.ALMotion.setExternalCollisionProtectionEnabled("Move", False)
                        self.s.ALMotion.moveInit()
                        isMoving = True
                    targetPose = self.interpretJointsPose()
                    if (targetPose == (0, 0, 0)):
                        # BRAKE
                        self.s.ALMotion.stopMove()
                    else:
                        self.moveToTargetPose(targetPose)
                    # Set LEDs to green.
                    self.s.ALLeds.fadeRGB("FaceLeds", 256 * 255, 0.1)
                else:
                    if isMoving:
                        # Stop the robot.
                        self.s.ALMotion.stopMove()
                        self.s.ALMotion.setExternalCollisionProtectionEnabled("Move", True)
                        isMoving = False
                        # Set LEDs to blue.
                        self.s.ALLeds.fadeRGB("FaceLeds", 255, 0.1)
                        self.loosenArm(False)
        finally:
            if self.time_idle() >= TIMEOUT_TO_STOP:
                self.logger.info("Stopping on timeout ({}s)...".format(int(self.time_idle())))
            else:
                self.logger.info("Stopping...")
            self.is_following = False
            self.stop_moving()
    @qi.nobind
    def pause_moving(self):
        "pause moving"
        try:
            if self.is_moving:
                self.is_moving = False
                # Stop the robot.
                self.s.ALMotion.stopMove()
                self.s.ALMotion.setExternalCollisionProtectionEnabled("Move", True)
                # Set LEDs to blue.
                self.s.ALLeds.fadeRGB("FaceLeds", 255, 0.1)
                self.loosenArm(False)
                self.time_action = time.time()
        except AttributeError as e:
            self.logger.warning("Service not found: {}".format(e))
    @qi.nobind
    def resume_moving(self):
        "resume moving"
        try:
            if not self.is_moving:
                self.s.ALMotion.setAngles("Head", [0.44, -0.44], 0.1)
                # needs ENABLE_DEACTIVATION_OF_FALL_MANAGER in /home/naoqi/.config/naoqi/ALMotion.xml
                self.s.ALMotion.setExternalCollisionProtectionEnabled("Move", False)
                self.s.ALAutonomousLife.setAutonomousAbilityEnabled("All", False)
                self.s.ALBasicAwareness.pauseAwareness()
                self.s.ALBackgroundMovement.setEnabled(False)
                self.s.ALMotion.moveInit()
                # Set LEDs to green.
                self.s.ALLeds.fadeRGB("FaceLeds", 256 * 255, 0.1)
                self.is_moving = True
                self.loosenArm(True)
                self.time_action = -1
        except AttributeError as e:
            self.logger.warning("Service not found: {}".format(e))
    @qi.nobind
    def stop_moving(self):
        "stop moving"
        try:
            self.pause_moving()
            # Set LEDs to white.
            self.s.ALLeds.fadeRGB("FaceLeds", 256 * 256 * 255 + 256 * 255 + 255, 0.1)
            self.s.ALMotion.setMoveArmsEnabled(True, True)
            #self.s.ALBasicAwareness.setTrackingMode(initTrackingMode)
            self.s.ALBasicAwareness.resumeAwareness()
            self.s.ALBackgroundMovement.setEnabled(True)
            #self.s.ALAutonomousLife.setState("solitary")
            #self.s.ALAutonomousLife.startMonitoringLaunchpadConditions()
            self.s.ALAutonomousLife.setAutonomousAbilityEnabled("All", True)
        except AttributeError as e:
            self.logger.warning("Service not found: {}".format(e))
        self.logger.info("ALMotionComing stopped moving")
    @qi.bind(returnType=qi.Bool, paramsType=[])
    def isMoving(self):
        "whether the robot is currently moving"
        return (self.is_moving)
    @qi.bind(returnType=qi.Bool, paramsType=[])
    def isFollowing(self):
        "whether the robot is coming with someone hand in hand"
        return (self.is_following)
    @qi.bind(returnType=qi.Bool, paramsType=[])
    def isTracking(self):
        "whether the robot is tracking someone"
        return (self.is_tracking)
    @qi.bind(returnType=qi.Void, paramsType=[])
    def trackUser(self):
        """
        Tracks the currently focused user with ALTracker.
        """
        try:
            self.user = self.s.ALUserSession.getFocusedUser()
            if self.user < 0:
                self.logger.warning("No focused user to track")
                return
            self.tracked_user = self.s.ALUserSession.getPpidFromUsid(self.user)
            if self.tracked_user < 0:
                self.logger.warning("User {} does not exist in UserSession".format(self.tracked_user))
                return
            self.is_tracking = True
            self.on_human_tracked_time = None
            self.on_human_tracked_time_beep = None
            self.on_human_tracked_tries = None
            self.on_move_failed_time = None
            self.on_move_failed_time_beep = None
            self.on_move_failed_tries = None
            self.logger.info("about to track user {}".format(self.user))
            self.s.ALMotion.wakeUp()
            fractionMaxSpeed = 0.8
            self.s.ALRobotPosture.goToPosture("StandInit", fractionMaxSpeed)
            targetName = "People"
            self.saved_tracking_mode = self.s.ALBasicAwareness.getTrackingMode()
            self.s.ALBasicAwareness.setTrackingMode("WholeBody")
            self.saved_engagement_mode = self.s.ALBasicAwareness.getEngagementMode()
            self.s.ALBasicAwareness.setEngagementMode("FullyEngaged")
            self.s.ALTracker.registerTarget(targetName, self.tracked_user)
            self.s.ALTracker.setMode("Move")
            # Set the robot's position relative to the target:
            # the robot stays 50 cm from the target, with 10 cm precision.
            self.s.ALTracker.setRelativePosition([-0.5, 0.0, 0.0, 0.1, 0.1, 0.3])
            self.s.ALTracker.toggleSearch(True)
            self.s.ALTracker.track(targetName)
            self.connect_callback("ALBasicAwareness/HumanTracked", self.on_human_tracked)
            #self.connect_callback("ALBasicAwareness/HumanLost", self.on_human_lost)
            self.connect_callback("ALMotion/MoveFailed", self.on_move_failed)
            self.logger.info("ALTracker successfully started tracking user {}".format(self.tracked_user))
        except AttributeError as e:
            self.logger.warning("Service not found: {}".format(e))
            self.stopTrackingUser()
        except Exception as e:
            self.logger.warning("Exception {}".format(e))
            self.stopTrackingUser()
    @qi.bind(returnType=qi.Void, paramsType=[])
    def stopTrackingUser(self):
        if self.is_tracking:
            try:
                self.is_tracking = False
                self.s.ALTracker.stopTracker()
                self.s.ALTracker.unregisterAllTargets()
                if self.saved_tracking_mode is not None:
                    self.s.ALBasicAwareness.setTrackingMode(self.saved_tracking_mode)
                    self.saved_tracking_mode = None
                if self.saved_engagement_mode is not None:
                    self.s.ALBasicAwareness.setEngagementMode(self.saved_engagement_mode)
                    self.saved_engagement_mode = None
            except AttributeError as e:
                self.logger.warning("Service not found: {}".format(e))
            #self.disconnect_callbacks(["ALBasicAwareness/HumanLost", "ALMotion/MoveFailed"])
            self.disconnect_callbacks("all")
            self.user = None
            self.tracked_user = None
            self.logger.info("ALTracker stopped.")
    @qi.nobind
    def connect_callback(self, event_name, callback_func):
        """ connect a callback for a given event """
        subscriber = self.s.ALMemory.subscriber(event_name)
        subscriber.signal.connect(callback_func)
        self.subscribers_list.append((event_name, subscriber))
    @qi.nobind
    def disconnect_callbacks(self, event_names):
        """ disconnect all callbacks for a given event """
        if event_names == "all":
            self.subscribers_list = []