Please help me get full jaw movement when using speakBlocking(some text).

#here is my full script
 
 
from java.lang import String
import threading
import time
import random
from org.myrobotlab.net import BareBonesBrowserLaunch
##
import urllib, urllib2
import json
 
from datetime import datetime
#######################
import io
import itertools
import textwrap
import codecs
import socket
import os
import shutil
import hashlib
import subprocess
import csv
from subprocess import Popen, PIPE
from time import sleep
 
#############################################################
# All bot specific hardware configuration goes here.
leftPort = "COM4"    # Arduino driving head / left arm / left hand / torso
rightPort = "COM3"   # Arduino driving right arm / right hand
neoPort = "COM6"     # Arduino driving the NeoPixel ring
# FIX: use raw strings for Windows paths so backslashes can never be
# interpreted as escape sequences (the originals only worked because
# \m, \P and \g happen not to be escapes).
gesturesPath = r"C:\mrl\ProgramAB_bots\gestures"
aimlPath = r"C:\mrl\ProgramAB"
aimlBotName = "inmoovWebKit"
aimlUserName = "Stephen"
lang = "EN"
Voice = "cmu-slt-hsmm" # Default female for MarySpeech
#Voice="cmu-bdl-hsmm" # Default male for MarySpeech
voiceType = Voice
############################################################

# NOTE(review): "global" at module level is a no-op; kept only so later
# gesture files that expect these declarations keep working.
global human
global inmoov
global weathervar
global walkingThread
#############################################################
# helper function help debug the recognized text from webkit/sphinx
######################################################################
def heard(data):
  print "Speech Recognition Data:"+str(data)
 
######################################################################
#
# MAIN ENTRY POINT  - Start and wire together all the services.
#
######################################################################
# Create ProgramAB chat bot ( This is the inmoov "brain" )
######################################################################
#Starting Arduino Service
arduino = Runtime.createAndStart("arduino","Arduino")
# BUG FIX: connect() was given the literal string "neoPort" instead of the
# configured port variable, so the board on COM6 was never attached.
arduino.connect(neoPort)

#Starting NeoPixel Service
neopixel = Runtime.createAndStart("neopixel","NeoPixel")

#neopixel.attach(arduino, pin, number of pixel)
neopixel.attach(arduino, 6, 16)
 
#Animation Names:
#"Color Wipe"
#"Larson Scanner" Knight Rider
#"Theater Chase"
#"Theater Chase Rainbow"
#"Rainbow"
#"Rainbow Cycle"
#"Flash Random"
#"Ironman"
#speed: 1-65535   1=full speed, 2=2x slower than 1, 10=10x slower than 1
 
def pixoff():
  # Blank the whole ring: zero every pixel, then push one frame out.
  count = neopixel.numPixel
  for idx in range(1, count + 1):
    neopixel.setPixel(idx, 0, 0, 0)  # setPixel(pixel, red, green, blue)
  neopixel.writeMatrix()  # send the pixel data to the Neopixel hardware
 
def pixred():
  # Sweep a single lit pixel around the ring, ten full laps.
  # NOTE(review): despite the name, the pixel is lit WHITE (255,255,255),
  # not red -- confirm the intended colour.
  for lap in range(0, 10):
    for idx in range(1, neopixel.numPixel + 1):
      neopixel.setPixel(idx, 255, 255, 255)
      neopixel.writeMatrix()
      sleep(0.03)  # brief pause so the chase is visible
      neopixel.setPixel(idx, 0, 0, 0)  # turn the pixel back off
  neopixel.writeMatrix()
 
### starting an animation ##############
#neopixel.setAnimation("Animation Name", red, green, blue, speed) #RGB!!!
neopixel.setAnimation("Theater Chase", 255, 255, 255, 1) #running Theater Chase in white (255,255,255) at full speed
 
# ProgramAB chat bot -- the InMoov "brain" that turns heard text into replies.
inmoovWebKit = Runtime.createAndStart("inmoovWebKit", "ProgramAB")
inmoovWebKit.setPath(aimlPath)
inmoovWebKit.startSession(aimlUserName, aimlBotName)

######################################################################
# Html filter to clean the output from programab.  (just in case)
htmlfilter = Runtime.createAndStart("htmlfilter", "HtmlFilter")

######################################################################
# mouth service, speech synthesis
mouth = Runtime.createAndStart("i01.mouth", "MarySpeech")
mouth.setVoice(voiceType)
#mouth.setAudioEffects("F0Add(f0Add=90.0)+TractScaler(amount=1.2)")

######################################################################
# the "ear" of the inmoov
ear = Runtime.createAndStart("i01.ear", "WebkitSpeechRecognition")
# route every recognized phrase to the heard() debug helper above
ear.addListener("publishText", python.name, "heard");
# let the ear mute itself while the mouth speaks, so the bot does not hear itself
ear.addMouth(mouth)

# 1-second Clock; presumably a keep-alive workaround for webkit recognition
# dropping out -- TODO confirm why this is needed.
WebkitSpeechRecognitionFix = Runtime.start("WebkitSpeechRecognitionFix","Clock")
WebkitSpeechRecognitionFix.setInterval(1000)
WebkitSpeechRecognitionFix.startClock()

######################################################################
# MRL Routing webkitspeechrecognition/ear -> program ab -> htmlfilter -> mouth
######################################################################
ear.addTextListener(inmoovWebKit)
inmoovWebKit.addTextListener(htmlfilter)
htmlfilter.addTextListener(mouth)

######################################################################
#Gets the battery level
level = Runtime.getBatteryLevel()
 
######################################################################
# CREATE the InMoov services and set Min/Max/Mapping/Rest
######################################################################
# Create (but do not start yet) the InMoov composite service and its parts,
# then tune each servo's map: map(inMin, inMax, outMin, outMax) rescales the
# 0-180 command range onto the physically safe range of each joint.
i01 = Runtime.create("i01", "InMoov")
##############
head = Runtime.create("i01.head","InMoovHead")
##############
# tweaking default settings of jaw
#head.jaw.setMinMax(105,160)
# full 0-180 command range compressed onto the physical 105(closed)-160(open)
head.jaw.map(0,180,105,160)

# tweaking default settings of eyes
head.eyeX.map(0,180,75,105)
head.eyeY.map(0,180,110,180)

# tweaking default settings of head
head.neck.map(0,180,10,170) #capable 0-180
# output range inverted (170..30) to flip the rotation direction
head.rothead.map(0,180,170,30)
##############

torso = Runtime.create("i01.torso", "InMoovTorso")
# tweaking default torso settings
torso.topStom.map(0,180,80,130)
torso.midStom.map(0,180,120,30) #130

##############
leftHand = Runtime.create("i01.leftHand","InMoovHand")
# tweaking default settings of left hand
leftHand.thumb.map(0,180,0,180)
leftHand.index.map(0,180,0,180)
leftHand.majeure.map(0,180,0,180)
leftHand.ringFinger.map(0,180,0,180)
leftHand.pinky.map(0,180,0,180)
leftHand.wrist.map(0,180,120,180)

###############
leftArm = Runtime.create("i01.leftArm","InMoovArm")
#tweak defaults LeftArm
leftArm.bicep.map(0,180,0,80)
leftArm.rotate.map(0,180,40,160)
leftArm.shoulder.map(0,180,30,160)
leftArm.omoplate.map(0,180,0,70)
################

rightHand = Runtime.create("i01.rightHand","InMoovHand")
# tweaking defaults settings of right hand
rightHand.thumb.map(0,180,0,180)
rightHand.index.map(0,180,0,180)
rightHand.majeure.map(0,180,0,180)
rightHand.ringFinger.map(0,180,0,180)
rightHand.pinky.map(0,180,0,180)
rightHand.wrist.map(0,180,90,0)
#################

rightArm = Runtime.create("i01.rightArm","InMoovArm")
# tweak default RightArm
rightArm.bicep.map(0,180,6,95)
rightArm.rotate.map(0,180,40,160)
rightArm.shoulder.map(0,180,160,30)
rightArm.omoplate.map(0,180,0,70)

# InMoov has a forward servo, i'm adding
forwardServo = Runtime.create("forwardServo","Servo")
 
######################################################################
# Now START the services that have been created and initialized
######################################################################
# Now START the services that have been created and initialized above.
i01 = Runtime.start("i01","InMoov")
i01.setMute(False)
######################################################################

##################
# Start the Head and Mouth
##################
i01.startHead(leftPort)
i01.startMouthControl(leftPort)
i01.mouthControl.setmouth(0,180) #105close, 160open (physical range set by head.jaw.map above)
# FIX for weak jaw travel during speakBlocking(): mouthControl drives the jaw
# servo while audio plays, so remove the jaw speed limit (-1 = full speed in
# the MRL Servo API) so it can reach the mapped extremes between syllables.
head.jaw.setVelocity(-1)
i01.startMouth()
i01.mouth = mouth

##################
# Start the Arms
##################
i01.startRightArm(rightPort)
i01.startRightHand(rightPort)
i01.startLeftArm(leftPort)
i01.startLeftHand(leftPort)
i01.startTorso(leftPort)
 
################## 
# Start OpenCV (disabled)
################## 
#opencv = i01.startOpenCV()

################## 
# Start the PIR Sensor (disabled)
################## 
#i01.startPIR(leftPort,23)

#def input():
#    print 'python object is ', msg_clock_pulse
#    pin = msg_i01_right_publishPin.data[0]
#    print 'pin data is ', pin.pin, pin.value
#    if (pin.value == 1):
#        i01.mouth.speak("I was dreaming")
#        powerup()
#        resting()

# Boot greeting: blocks until the audio has finished playing, then stop the
# NeoPixel boot animation started earlier.
i01.speakBlocking("It feels good to be alive.")
neopixel.animationStop()
 
######################################################################
# Launch the web gui and create the webkit speech recognition gui in CHROME
#################################################################
# BUG FIX: "webgui" was used here but never created anywhere in the script,
# so autoStartBrowser()/startService() raised a NameError. Create the WebGui
# service (browser launch disabled -- we open the exact page ourselves below).
webgui = Runtime.create("webgui","WebGui")
webgui.autoStartBrowser(False)
webgui.startService()
BareBonesBrowserLaunch.openURL("http://localhost:8888/#service/i01.ear")
 
######################################################################
# END MAIN SERVICE SETUP SECTION
######################################################################
# NOTE(review): "en-EN" is not a standard locale tag -- probably meant
# "en-US" or "en-GB"; verify against the WebkitSpeechRecognition docs.
ear.setLanguage("en-EN")
python.subscribe(ear.getName(),"publishText")

# NOTE(review): "resting" is not defined in this file -- presumably loaded
# later from the gestures folder; confirm before relying on this command.
ear.addCommand("relax", "python", "resting")

################## 
# set default speeds */sec  (Servo.setVelocity, degrees per second)
################## 
rightArm.bicep.setVelocity(50)
rightArm.rotate.setVelocity(50)
rightArm.shoulder.setVelocity(50)
rightArm.omoplate.setVelocity(50)

leftArm.bicep.setVelocity(50)
leftArm.rotate.setVelocity(50)
leftArm.shoulder.setVelocity(50)
leftArm.omoplate.setVelocity(50)

rightHand.thumb.setVelocity(55)
rightHand.index.setVelocity(75)
rightHand.majeure.setVelocity(75)
rightHand.ringFinger.setVelocity(75)
rightHand.pinky.setVelocity(75)
rightHand.wrist.setVelocity(75)

leftHand.thumb.setVelocity(55)
leftHand.index.setVelocity(75)
leftHand.majeure.setVelocity(75)
leftHand.ringFinger.setVelocity(75)
leftHand.pinky.setVelocity(75)
leftHand.wrist.setVelocity(75)

head.rothead.setVelocity(75)
head.neck.setVelocity(75)
head.eyeX.setVelocity(30)
head.eyeY.setVelocity(30)

torso.topStom.setVelocity(75)
torso.midStom.setVelocity(75)

################## 
# def command movements
################## 

python.subscribe(ear.getName(),"onRecognized")
def onRecognized(text):
  if DEBUG==1:
    print "onRecognized : ",text
 
### RIGHT ARM ############################## [ # ] denotes anatomical neutral
def rightArmBic(myVar):
  # Right bicep: EXTENSION <-- [0] ... 180 --> FLEXION ([#] = anatomical neutral).
  servo = rightArm.bicep
  servo.moveTo(myVar)
 
def rightArmRot(myVar):
  # Right arm rotation: INTERNAL ROT <-- 0 ... [70] ... 180 --> EXTERNAL ROT.
  servo = rightArm.rotate
  servo.moveTo(myVar)
 
def rightArmSho(myVar):
  # Right shoulder: NEUTRAL <-- [0] ... 180 --> FORWARD FLEXION.
  servo = rightArm.shoulder
  servo.moveTo(myVar)
 
def rightArmOmo(myVar):
  # Right omoplate: ADDUCTION <-- [0] ... 180 --> ABDUCTION.
  servo = rightArm.omoplate
  servo.moveTo(myVar)
 
def RightWrist(myVar):
  # Right wrist: PRONATION <-- 0 ... [90] ... 180 --> SUPINATION.
  servo = rightHand.wrist
  servo.moveTo(myVar)
 
### LEFT ARM ##############################
def leftArmBic(myVar):
  # Left bicep: EXTENSION <-- [0] ... 180 --> FLEXION.
  servo = leftArm.bicep
  servo.moveTo(myVar)
 
def leftArmRot(myVar):
  # Left arm rotation: INTERNAL ROT <-- 0 ... [70] ... 180 --> EXTERNAL ROT.
  servo = leftArm.rotate
  servo.moveTo(myVar)
 
def leftArmSho(myVar):
  # Left shoulder: NEUTRAL <-- [0] ... 180 --> FORWARD FLEXION.
  servo = leftArm.shoulder
  servo.moveTo(myVar)
 
def leftArmOmo(myVar):
  # Left omoplate: ADDUCTION <-- [0] ... 180 --> ABDUCTION.
  servo = leftArm.omoplate
  servo.moveTo(myVar)
 
def leftWrist(myVar):
  # Left wrist: PRONATION <-- 0 ... [90] ... 180 --> SUPINATION.
  servo = leftHand.wrist
  servo.moveTo(myVar)
 
### HEAD ##############################
def headTurn(myVar):
  # Head pan: HEAD LEFT <-- 0 ... [90] ... 180 --> HEAD RIGHT.
  servo = head.rothead
  servo.moveTo(myVar)
 
def headVert(myVar):
  # Head tilt: HEAD DOWN <-- 0 ... [90] ... 180 --> HEAD UP.
  servo = head.neck
  servo.moveTo(myVar)
 
### EYES ##############################
def eyesleft():
  # Pan the eyes fully to one extreme (eyeX -> 0).
  head.eyeX.moveTo(0)
def eyesright():
  # Pan the eyes fully to the other extreme (eyeX -> 180).
  head.eyeX.moveTo(180)
def eyesstraight():
  # Center both eye axes at their midpoint.
  center = 90
  head.eyeX.moveTo(center)
  head.eyeY.moveTo(center)
def eyesup():
  # Tilt the eyes to the eyeY = 0 extreme.
  head.eyeY.moveTo(0)
def eyesdown():
  # Tilt the eyes to the eyeY = 180 extreme.
  head.eyeY.moveTo(180)
 
### TORSO ##############################
def torsoLean(myVar):
  # Torso lean: LEAN LEFT <-- 0 ... [90] ... 180 --> LEAN RIGHT.
  servo = torso.topStom
  servo.moveTo(myVar)
 
def torsoTurn(myVar):
  # Torso twist: TURN LEFT <-- 0 ... [90] ... 180 --> TURN RIGHT.
  servo = torso.midStom
  servo.moveTo(myVar)
 
def standstraight():
  # Return the torso to anatomical neutral on both axes.
  neutral = 90
  torsoLean(neutral)
  torsoTurn(neutral)
 
######################################################################
# Helper functions and various gesture definitions
######################################################################
def autoHeadTest():
  """Exercise the neck and rothead servos through their extremes and midpoints,
  announcing each pose, then return to neutral. Listening is paused so the
  bot does not react to its own speech."""
  #neopixel.setAnimation("Rainbow", 255, 255, 255, 1)
  ear.pauseListening()
  i01.setHeadSpeed(0.5,0.5)
  mouth.speakBlocking("Testing my head and neck functions")
  ### HEAD LEFT <-- 0 * 25 * 45 * 70 * [90] * 110 * 135 * 155 * 180 --> HEAD RIGHT
  ### HEAD DOWN <-- 0 * 25 * 45 * 70 * [90] * 110 * 135 * 155 * 180 --> HEAD UP

  # (phrase, vertical target, turn target or None to keep heading, pause after)
  poses = [
    # max range sweep
    ("Looking low and left",            0,   0,    True),
    ("Looking high and left",           180, None, True),
    ("Back to neutral",                 90,  90,   True),
    ("Looking low and right",           0,   180,  True),
    ("Looking high and right",          180, None, True),
    ("Back to neutral",                 90,  90,   True),
    # mid range sweep
    ("Looking kind of low and left",    45,  45,   True),
    ("Looking kind of high and left",   135, None, True),
    ("Back to neutral",                 90,  90,   True),
    ("Looking kind of low and right",   45,  135,  True),
    ("Looking kind of high and right",  135, None, True),
    ("Back to neutral",                 90,  90,   False),
  ]
  for phrase, vert, turn, pause in poses:
    mouth.speakBlocking(phrase)
    headVert(vert)
    if turn is not None:
      headTurn(turn)
    if pause:
      sleep(1)
  mouth.speakBlocking("Testing my head and neck functions is complete")
  #neopixel.animationStop()
  ear.resumeListening()
 
# Load all gesture definition files from the configured gestures folder.
i01.loadGestures(gesturesPath)
 
moz4r's picture

about jaw you can try , after

About the jaw, you can try the following after the map:

i01.mouthControl.setmouth(0,180)
head.jaw.setVelocity(-1)

 

 

 

kwatters's picture

start / end speaking callbacks.

Hi Mayaway,

  The way mouth control works is by subscribing to 2 events from the speech synthesis service.  The publishStartSpeaking and publishEndSpeaking events tell mouth control when the robot starts playing the audio and finishes playing the audio.  These events trigger the mouth control to animate the jaw servo.

  So, I looked at the code and it's a bit confusing, and there might be a bug there.  I suspect the issue is really that these methods aren't being properly subscribed to in your script.

you could try something like the following:

i01.mouthControl.setMouth(i01.mouth)  

I see in the MarySpeech service that this will trigger the subscription to publishStartSpeaking, but I don't see that it subscribes to publishEndSpeaking.

So...  long story short that might help / fix it.. but the true solution is to make sure that the mouth control service subscribes to these 2 events from the mouth / speech synthesis service... 

I hope this helps explain a little bit more about what's going on here.

Good luck!

-Kevin

mayaway's picture

Here is all of the mouth code

Here is all of the mouth code condensed:

mouth = Runtime.createAndStart("i01.mouth", "MarySpeech")

mouth.setVoice(voiceType)

ear.addMouth(mouth)

htmlfilter.addTextListener(mouth)

# insert here ?

i01.startMouthControl(leftPort)

i01.mouthControl.setmouth(0,180) #105close, 160open

i01.startMouth()

i01.mouth = mouth

 

So I'm guessing (since there is no documentation) that I might "# insert here?" (above) 

i01.mouthControl.setMouth(i01.mouth)  

and since 'mouth'='i01.mouth' I should be able to just enter

i01.mouthControl.setMouth(mouth)  

 

I have NO IDEA how to subscribe to publishEndSpeaking nor that I even need to, or that it even exists!!! (and how would I? )

 

In other news:

I continue to be baffled by a total loss of verbal control as if all Gesture and/or AIML files are simply not there.

My paths and folder names are spot on. Could there be a bug in one of the files that breaks the works?

I can't see any.

 

The NoWorky process is a complete mystery. My notification "Sending NoWorky" never provides an indication that it is complete.

What should happen? Then what happens next if you actually get one? We must have to pre-arrange the send so you know what you are looking at?

and who is looking? I get that it's all volunteer, work for free, open source, and all this must be part of the down side...

I'm on a clean install of Win7, Chrome, Java, MRL just for running the InMoov...

 

Thanks! It's a weird pleasure to be on the user end of the experience as I am part of the dev team for a large project as well.

It gives me insights to how our users feel about dev cycles, changes and all. How would they know what we've done!

 
mayaway's picture

setMouth

i01.mouth.setMouth(i01.mouth) ERROR::: Object Has No Attribute setMouth!!!

I think I'm done. I'm heading over to EZ-Robot, where all this is, well, easy...

S~