Diffstat (limited to 'noetic-llama/src')
-rw-r--r--   noetic-llama/src/ollamawrapper/CMakeLists.txt                    2
-rw-r--r--   noetic-llama/src/ollamawrapper/setup.py                          9
-rw-r--r--   noetic-llama/src/ollamawrapper/src/capabilities/__init__.py      3
-rw-r--r--   noetic-llama/src/ollamawrapper/src/capabilities/basicmaths.py   47
-rw-r--r--   noetic-llama/src/ollamawrapper/src/capabilities/gestures.py     82
-rw-r--r--   noetic-llama/src/ollamawrapper/src/ollamawrapper.py             98
m---------   noetic-llama/src/pal_msgs                                        0
-rw-r--r--   noetic-llama/src/whisperwrapper/src/whisperwrapper.py            2
8 files changed, 192 insertions, 51 deletions
diff --git a/noetic-llama/src/ollamawrapper/CMakeLists.txt b/noetic-llama/src/ollamawrapper/CMakeLists.txt
index 4533887..82b4be4 100644
--- a/noetic-llama/src/ollamawrapper/CMakeLists.txt
+++ b/noetic-llama/src/ollamawrapper/CMakeLists.txt
@@ -21,7 +21,7 @@ find_package(catkin REQUIRED COMPONENTS
 ## Uncomment this if the package has a setup.py. This macro ensures
 ## modules and global scripts declared therein get installed
 ## See http://ros.org/doc/api/catkin/html/user_guide/setup_dot_py.html
-# catkin_python_setup()
+catkin_python_setup()
 
 ################################################
 ## Declare ROS messages, services and actions ##
diff --git a/noetic-llama/src/ollamawrapper/setup.py b/noetic-llama/src/ollamawrapper/setup.py
new file mode 100644
index 0000000..221ba55
--- /dev/null
+++ b/noetic-llama/src/ollamawrapper/setup.py
@@ -0,0 +1,9 @@
+from setuptools import setup
+from catkin_pkg.python_setup import generate_distutils_setup
+
+setup_args = generate_distutils_setup(
+    packages = ["capabilities"],
+    package_dir = {"": "src"}
+)
+
+setup(**setup_args)
\ No newline at end of file
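With catkin_python_setup() enabled, the new setup.py installs the capabilities package from src/, so it can be imported by any node once the workspace is built and sourced. A minimal sketch of what that enables (the build and source step is implied, not part of this diff):

    # after: catkin_make && source devel/setup.bash
    from capabilities import basicmaths

    result = basicmaths.add(2, 3)   # prints "2.000000 + 3.000000 = 5.000000"
    print(result)                   # 5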
diff --git a/noetic-llama/src/ollamawrapper/src/capabilities/__init__.py b/noetic-llama/src/ollamawrapper/src/capabilities/__init__.py
index 7c07dce..daf617c 100644
--- a/noetic-llama/src/ollamawrapper/src/capabilities/__init__.py
+++ b/noetic-llama/src/ollamawrapper/src/capabilities/__init__.py
@@ -1,2 +1,3 @@
 from .weather import *
-from .basicmaths import *
\ No newline at end of file
+from .basicmaths import *
+# from .gestures import *
\ No newline at end of file
\ No newline at end of file diff --git a/noetic-llama/src/ollamawrapper/src/capabilities/basicmaths.py b/noetic-llama/src/ollamawrapper/src/capabilities/basicmaths.py index e2491e0..ebad59c 100644 --- a/noetic-llama/src/ollamawrapper/src/capabilities/basicmaths.py +++ b/noetic-llama/src/ollamawrapper/src/capabilities/basicmaths.py @@ -1,5 +1,7 @@ +import rospy -def add(num1=1, num2=2): + +def add(num1, num2):      """Returns the output of two numbers added together      Args: @@ -9,4 +11,45 @@ def add(num1=1, num2=2):      Returns:          int: The sum of the two numbers      """ -    print("%f + %f = %f" % (num1, num2, num1 + num2))
\ No newline at end of file +    print("%f + %f = %f" % (num1, num2, num1 + num2)) +    return num1 + num2 + +def sub(num1, num2): +    """Returns the output of two numbers subtracted from each other + +    Args: +        num1 (int): The first number +        num2 (int): A number to subtract the first number from +    """ +    print("%f - %f = %f" % (num1, num2, num1 - num2)) +    return num1 - num2 + + +def mult(num1, num2): +    """Returns the output of two numbers multiplied with each other + +    Args: +        num1 (int): The first number to multiply together +        num2 (int): The second number to multiply together +    """ +    print("%f × %f = %f" % (num1, num2, num1 * num2)) +    return  num1 * num2 + +def div(num1, num2): +    """Returns the output of two numbers divided with each other + +    Args: +        num1 (int): The enumerator +        num2 (int): The denominator +    """ +    print("%f ÷ %f = %f" % (num1, num2, num1 / num2)) +    return num1 / num2 + +"""class A: +    def funct(): + + +a = A() + +def func(): +    a.funct()""" diff --git a/noetic-llama/src/ollamawrapper/src/capabilities/gestures.py b/noetic-llama/src/ollamawrapper/src/capabilities/gestures.py new file mode 100644 index 0000000..683a7a3 --- /dev/null +++ b/noetic-llama/src/ollamawrapper/src/capabilities/gestures.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python +import sys +import rospy +from move_base_msgs.msg import MoveBaseAction , MoveBaseGoal +import moveit_commander +import actionlib +from pal_interaction_msgs.msg import TtsActionGoal, TtsAction, TtsGoal +from geometry_msgs.msg import PoseStamped + +class arm_move: +    def __init__(self): +        moveit_commander.roscpp_initialize(sys.argv) +        self.ac = actionlib.SimpleActionClient('/tts' , TtsAction) +        self.ac.wait_for_server() +        self.tts_goal = TtsActionGoal() +        self.tts_goal.goal.rawtext.text = 'Hi,_I_am_up' +        self.tts_goal.goal.rawtext.lang_id = 'en_GB' +        self.ac.send_goal(self.tts_goal.goal) +        self.scene = moveit_commander.PlanningSceneInterface() +        self.arm = moveit_commander.MoveGroupCommander('arm') +        self.reference_frame = 'arm_1_link' +        self.arm.set_pose_reference_frame(self.reference_frame) +        self.arm.allow_replanning(True) +        self.target_pose = PoseStamped() +        self.target_pose.header.frame_id = self.reference_frame +        self.object1_move_1 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] +        self.object1_move_2 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] +        self.object1_move_3 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] +        self.object2_move_1 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] +        self.object2_move_2 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] +        self.object2_move_3 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] +        self.object3_move_1 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] +        self.object3_move_2 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] +        self.object3_move_3 = [0.27 , 0.30 , -1.47 , 1.63 , -0.70 , -0.11 , 0.94] + +    def goto_obj1(self): +        """ +        A Function for reaching to object 1 # change the name of the object here +        """ +        self.arm.go(self.object1_move_1 , wait=True) +        rospy.sleep(5) +        self.arm.go(self.object1_move_2 , wait=True) +        rospy.sleep(5) +        self.arm.go(self.object1_move_3 , wait=True) + +    def goto_obj2(self): +        """ +        A Function for 
reaching to object 1 # change the name of the object here +        """ +        self.arm.go(self.object2_move_1 , wait=True) +        rospy.sleep(5) +        self.arm.go(self.object2_move_2 , wait=True) +        rospy.sleep(5) +        self.arm.go(self.object2_move_3 , wait=True) + +    def goto_obj3(self): +        """ +        A Function for reaching to object 1 # change the name of the object here +        """ +        self.arm.go(self.object3_move_1 , wait=True) +        rospy.sleep(5) +        self.arm.go(self.object3_move_2 , wait=True) +        rospy.sleep(5) +        self.arm.go(self.object3_move_3 , wait=True) + +    def speak(self, params): +        """ +        A function to speaking  +         +        """ +        self.goal.goal.rawtext.text = params # 'Please_take_the_bottle_from_my_hand' +        self.ac.send_goal(self.goal.goal  ) + +arm = arm_move() + +def speak(tosay): +    """Function to make the tiago speak arbitrary words + +    Args: +        tosay (string): A string to say using the speaker +    """ +    arm.speak(tosay)
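Each capability module exposes plain top-level functions; getfunctioncapabilities() in ollamawrapper.py collects their docstrings and argument names and renders them into the model's Modelfile, so the docstring effectively tells the model when and how to call the function. A sketch of adding another capability in the same style (the module and function below are hypothetical, not part of this commit):

    # capabilities/greet.py -- hypothetical example following the basicmaths.py pattern
    def greet(name):
        """Greets a person by name over the terminal

        Args:
            name (str): The name of the person to greet
        """
        print("Hello, %s!" % name)
        return name

It would also need re-exporting from capabilities/__init__.py (from .greet import *) before the wrapper can discover it.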
diff --git a/noetic-llama/src/ollamawrapper/src/ollamawrapper.py b/noetic-llama/src/ollamawrapper/src/ollamawrapper.py
index 66843f8..400816e 100644
--- a/noetic-llama/src/ollamawrapper/src/ollamawrapper.py
+++ b/noetic-llama/src/ollamawrapper/src/ollamawrapper.py
@@ -7,31 +7,33 @@ import typing
 import jinja2
 import ollama
 import rospy
+import time
 import sys
 import os
 
-import capabilities
-# yes we need both
-from capabilities import *
-
-ollama_api_url = rospy.get_param("/stt/ollama_api_url", "192.168.122.1:11434")
+ollama_api_url = rospy.get_param("/stt/ollama_api_url", "127.0.0.1:11434")
 base_ollama_model = rospy.get_param("/stt/ollama_base_model", "nexusraven:13b-v2-q2_K")
 
-@dataclass
-class FunctionCapability:
-    modulename: str
-    module: None
-    functioname: str
-    function: None
-    docstring: str
-    argnames: list
+def handle_ollama_call(req):
+    print("Recieved ollama request '%s'" % req.input)
+    o = main(req.input)
+    # print(o.keys())
+    return OllamaCallResponse(
+        o["total_duration"],
+        o["load_duration"],
+        o["prompt_eval_duration"],
+        o["eval_count"],
+        o["eval_duration"]
+    )
 
-class FunctionCapablilites(list):
-    def to_modelfile(self, model):
-        environment = jinja2.Environment(loader = jinja2.FileSystemLoader(os.path.dirname(__file__)))
-        template = environment.get_template("Modelfile.jinja2")
+rospy.init_node("ollama_wrapper_server")
 
-        return template.render(functioncapabilities = self, model = model)
+print("hello?")
+import capabilities
+from capabilities import *
+s = rospy.Service("/stt/ollamacall", OllamaCall, handle_ollama_call)
+print("spin")
+rospy.spin()
 
 def getfunctioncapabilities():
     functioncapabilities = FunctionCapablilites()
@@ -53,45 +55,49 @@ def getfunctioncapabilities():
 
     return functioncapabilities
 
+@dataclass
+class FunctionCapability:
+    modulename: str
+    module: None
+    functioname: str
+    function: None
+    docstring: str
+    argnames: list
+
+class FunctionCapablilites(list):
+    def to_modelfile(self, model):
+        environment = jinja2.Environment(loader = jinja2.FileSystemLoader(os.path.dirname(__file__)))
+        template = environment.get_template("Modelfile.jinja2")
+
+        return template.render(functioncapabilities = self, model = model)
+
 def get_functions(ollama_output):
     return [f.strip() for f in ollama_output[8:].strip().split(";") if f != ""]
 
-def main(prompt):
-    functioncapabilities = getfunctioncapabilities()
-    modelfile = functioncapabilities.to_modelfile(base_ollama_model)
+MODEL_NAME = "noeticllama:" + str(int(time.time()))
+client = ollama.Client(host = "http://%s" % ollama_api_url)
 
-    client = ollama.Client(host = "http://%s" % ollama_api_url)
+for mn in [m["name"] for m in client.list()["models"]]:
+    if mn.startswith("noeticllama"):
+        client.delete(mn)
 
-    client.create(model = "temp", modelfile = modelfile)
+functioncapabilities = getfunctioncapabilities()
+modelfile = functioncapabilities.to_modelfile(base_ollama_model)
+client.create(model = MODEL_NAME, modelfile = modelfile)
 
+def main(prompt):
     # with open("Modelfile", "r") as f:
     #    ollama.create(model = "temp", modelfile= f.read())
-    ollama_output = client.generate(model='temp', prompt = prompt, options={"stop": ["Thought:"]})
+    ollama_output = client.generate(
+        model = MODEL_NAME,
+        prompt = prompt,
+        options = {"stop": ["Thought:"]},
+        keep_alive = "30m"
+    )
 
     #print(ollama_output)
     for func_str in get_functions(ollama_output["response"]):
         rospy.loginfo("Generated function: " + func_str + ":")
         exec(func_str)
 
-    client.delete("temp")
-    return ollama_output
-
-def handle_ollama_call(req):
-    print("Recieved ollama request '%s'" % req.input)
-    o = main(req.input)
-    # print(o.keys())
-    return OllamaCallResponse(
-        o["total_duration"],
-        o["load_duration"],
-        o["prompt_eval_duration"],
-        o["eval_count"],
-        o["eval_duration"]
-    )
-
-def handle_ollama_server():
-    rospy.init_node("ollama_wrapper_server")
-    s = rospy.Service("/stt/ollamacall", OllamaCall, handle_ollama_call)
-    rospy.spin()
-
-if __name__ == "__main__":
-    handle_ollama_server()
\ No newline at end of file
+    return ollama_output
\ No newline at end of file
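With this change the node builds a fresh model (noeticllama:<timestamp>) once at import time, deletes stale noeticllama models, and serves /stt/ollamacall with the model kept alive for 30 minutes between requests. A minimal client sketch, assuming OllamaCall lives in this package's srv definitions (the import path and response field names are assumptions inferred from the handler):

    #!/usr/bin/env python
    # hypothetical client for the /stt/ollamacall service
    import rospy
    from ollamawrapper.srv import OllamaCall  # assumed srv location

    rospy.init_node("ollama_call_example")
    rospy.wait_for_service("/stt/ollamacall")
    ollama_call = rospy.ServiceProxy("/stt/ollamacall", OllamaCall)
    resp = ollama_call("add two and five")          # becomes req.input
    print(resp.total_duration, resp.eval_duration)  # timing stats passed through from ollama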
diff --git a/noetic-llama/src/pal_msgs b/noetic-llama/src/pal_msgs
new file mode 160000
+Subproject d101a28f5911e8e5d3c8d42eefdf1dc3924821a
diff --git a/noetic-llama/src/whisperwrapper/src/whisperwrapper.py b/noetic-llama/src/whisperwrapper/src/whisperwrapper.py
index 439c8de..4cd703b 100644
--- a/noetic-llama/src/whisperwrapper/src/whisperwrapper.py
+++ b/noetic-llama/src/whisperwrapper/src/whisperwrapper.py
@@ -12,7 +12,7 @@ import time
 import json
 import os
 
-whisper_api_url = rospy.get_param("/stt/whisper_api_url", "192.168.122.1:9000")
+whisper_api_url = rospy.get_param("/stt/whisper_api_url", "127.0.0.1:9000")
 pause = rospy.get_param("/stt/speech_recogn_pause_time", 0.8)
 energy = rospy.get_param("/stt/speech_recogn_energy", 400)
 dynamic_energy = rospy.get_param("/stt/speech_recogn_dyn_energy_flag", False)
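Both wrappers now default to localhost for their backends; the previous addresses still work if the parameters are set before the nodes start (for example from a launch file). A sketch of overriding them from a bootstrap script, using the old defaults purely as an example remote host:

    # hypothetical bootstrap: run before launching the wrapper nodes
    import rospy
    rospy.set_param("/stt/ollama_api_url", "192.168.122.1:11434")
    rospy.set_param("/stt/whisper_api_url", "192.168.122.1:9000")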
