diff --git a/docker/Dockerfile b/docker/Dockerfile index 4a03a82..e60274c 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -39,11 +39,11 @@ RUN apt-get update \ # Adapt pip install option based on distro (technically ubuntu version) RUN if [ "$ROS_DISTRO" = "humble" ]; then \ - pip install openai; \ + pip install openai ollama; \ elif [ "$ROS_DISTRO" = "iron" ]; then \ - pip install openai; \ + pip install openai ollama; \ else \ - pip install openai --break-system-packages; \ + pip install openai ollama --break-system-packages; \ fi # Build and source colcon workspace diff --git a/ros2ai/api/__init__.py b/ros2ai/api/__init__.py index 6dbd6fe..25e225a 100644 --- a/ros2ai/api/__init__.py +++ b/ros2ai/api/__init__.py @@ -20,20 +20,24 @@ import ros2ai.api.constants as constants def add_global_arguments(parser): - # add global arguments + # add global arguments + default_models = constants.ROS_OPENAI_DEFAULT_MODEL + ', ' + constants.ROS_OLLAMA_DEFAULT_MODEL parser.add_argument( '-m', '--model', metavar='', type=str, default=None, - help=f'Set OpenAI API model (default %(default)s) or ' + help=f'Set OpenAI API model (e.g. {default_models}) or ' f'use {constants.ROS_OPENAI_MODEL_NAME_ENV_VAR} environment variable. (argument prevails)') parser.add_argument( '-u', '--url', metavar='', type=str, default=None, - help='Set OpenAI API endpoint URL (default %(default)s) or ' + help=f'Set OpenAI API endpoint URL (e.g. {constants.ROS_OPENAI_DEFAULT_ENDPOINT}) or ' f'use {constants.ROS_OPENAI_ENDPOINT_ENV_VAR} environment variable. (argument prevails)') parser.add_argument( '-t', '--token', metavar='', type=int, default=None, help='Set OpenAI API maximum token (default %(default)s)') +# TODO(@fujitatomoya): replace the curl command based API calls with the Python API. +# There is no need to issue raw HTTP requests, bypassing the provided Python API, when it is available. 
+# curl https://api.openai.com/v1/models -H "Authorization: Bearer XXXXX" def curl_get_request(url, headers=None) -> Tuple[bool, Optional[List[str]]]: response_list = [] # only supports basic curl subprocess command @@ -63,3 +67,28 @@ def curl_get_request(url, headers=None) -> Tuple[bool, Optional[List[str]]]: except Exception as e: print(f"Error executing curl command: {e}") return False, None + + +# curl http://localhost:11434/api/tags +def ollama_get_models(url, headers=None) -> Tuple[bool, Optional[List[str]]]: + # only supports basic curl subprocess command + curl_cmd = ["curl", url] + + if headers: + for key, value in headers.items(): + curl_cmd.extend(["-H", f"{key}: {value}"]) + + try: + # execute the curl command in subprocess + result = subprocess.run(curl_cmd, capture_output=True, text=True, check=True) + if result.returncode == 0: + data = json.loads(result.stdout) + response_list = [d.get('name') + ' ' + d.get('modified_at') for d in data['models']] + return True, response_list + else: + # this means, subprocess returns failure + print(result.stdout) + return False, None + except Exception as e: + print(f"Error executing curl command: {e}") + return False, None diff --git a/ros2ai/api/config.py b/ros2ai/api/config.py index e2be592..55e5e11 100644 --- a/ros2ai/api/config.py +++ b/ros2ai/api/config.py @@ -23,10 +23,7 @@ def get_api_key() -> str: """ Get OpenAI API Key from OPENAI_API_KEY environment variable. - OpenAI API Key must be set by ros2ai user with OPENAI_API_KEY environment variable. - - :return: string of OpenAI API Key. - :raises: if OPENAI_API_KEY is not set. + :return: string of OpenAI API Key, otherwise the string "None". """ key_name = os.environ.get(constants.ROS_OPENAI_API_KEY_ENV_VAR, "None") return key_name @@ -35,13 +32,11 @@ def get_ai_model() -> str: """ Get OpenAI Model from OPENAI_MODEL_NAME environment variable. - OpenAI Model is optional, in default to gpt-4o - :return: string of OpenAI Model. 
""" model_name = os.environ.get(constants.ROS_OPENAI_MODEL_NAME_ENV_VAR) if not model_name: - # TODO(@fujitatomoya):better to print info here that using default model. + print('AI model is not set, defaults to ' + constants.ROS_OPENAI_DEFAULT_MODEL) return constants.ROS_OPENAI_DEFAULT_MODEL else: return model_name @@ -50,13 +45,12 @@ def get_endpoint_url() -> str: """ Get OpenAI API service endpoint URL from OPENAI_ENDPOINT environment variable. - OpenAI API service endpoint URL is optional, in default fallback to openai. - :return: string of OpenAI API service endpoint URL, could be None. """ url = os.environ.get(constants.ROS_OPENAI_ENDPOINT_ENV_VAR) # TODO(@fujitatomoya):check if that is valid url before return. if not url: + print('AI model is not set, defaults to ' + constants.ROS_OPENAI_DEFAULT_ENDPOINT) return constants.ROS_OPENAI_DEFAULT_ENDPOINT else: return url @@ -134,7 +128,7 @@ def is_api_key_valid(self): messages = [ { "role": "user", - "content": "Are you in service?", + "content": "Are you available?", }, ], temperature = self.get_value('api_temperature'), diff --git a/ros2ai/api/constants.py b/ros2ai/api/constants.py index 9b3e606..b6e89b1 100644 --- a/ros2ai/api/constants.py +++ b/ros2ai/api/constants.py @@ -12,12 +12,17 @@ # See the License for the specific language governing permissions and # limitations under the License. +# This is only required when actually accessing api.openai.com ROS_OPENAI_API_KEY_ENV_VAR = 'OPENAI_API_KEY' ROS_OPENAI_DEFAULT_MODEL = 'gpt-4o' +ROS_OLLAMA_DEFAULT_MODEL = 'llama3.1' + ROS_OPENAI_MODEL_NAME_ENV_VAR = 'OPENAI_MODEL_NAME' ROS_OPENAI_DEFAULT_ENDPOINT = 'https://api.openai.com/v1' +ROS_OLLAMA_DEFAULT_ENDPOINT = 'http://localhost:11434/v1' + ROS_OPENAI_ENDPOINT_ENV_VAR = 'OPENAI_ENDPOINT' # The system message helps set the behavior of the assistant. 
diff --git a/ros2ai/api/utils.py b/ros2ai/api/utils.py index 4062b74..c82b781 100644 --- a/ros2ai/api/utils.py +++ b/ros2ai/api/utils.py @@ -79,8 +79,8 @@ def get_ros_distro() -> str: """ distro = os.environ.get('ROS_DISTRO') if not distro: - print('ROS_DISTRO env value is not set.') - return None + print('ROS_DISTRO env value is not set, use rolling') + return 'rolling' return distro.lower() def truncate_before_substring(*, original, substring) -> str: diff --git a/ros2ai/verb/exec.py b/ros2ai/verb/exec.py index 0e543d9..020dd19 100644 --- a/ros2ai/verb/exec.py +++ b/ros2ai/verb/exec.py @@ -56,8 +56,6 @@ def main(self, *, args): print('Please insert your request! (I am not AI)') distro = get_ros_distro() - if distro is None: - distro = 'rolling' # fallback to rolling in default system_role = get_role_system(default_role_system=ROLE_SYSTEM_EXEC_DEFAULT) if args.role and args.role != system_role: system_role = args.role diff --git a/ros2ai/verb/query.py b/ros2ai/verb/query.py index 1e27012..6e25a20 100644 --- a/ros2ai/verb/query.py +++ b/ros2ai/verb/query.py @@ -57,8 +57,6 @@ def main(self, *, args): print('Dont be shy, put some questions! (I am not AI)') distro = get_ros_distro() - if distro is None: - distro = 'rolling' # fallback to rolling in default system_role = get_role_system(default_role_system=ROLE_SYSTEM_QUERY_DEFAULT) if args.role and args.role != system_role: system_role = args.role diff --git a/scripts/docker_release.sh b/scripts/docker_release.sh index 1cd6a83..e5a15e2 100755 --- a/scripts/docker_release.sh +++ b/scripts/docker_release.sh @@ -40,6 +40,7 @@ function print_usage() { } function exit_trap() { + # shellcheck disable=SC2317 # Don't warn about unreachable commands in this function if [ $? 
!= 0 ]; then echo "Command [$BASH_COMMAND] is failed" exit 1 @@ -138,7 +139,8 @@ fi # verifying images if [ "$verify_image" = true ]; then - # TODO@fujitatomoya: need to check if OPENAI_API_KEY env is set, otherwise return error + # TODO@fujitatomoya: need to check if ollama system service is running OR + # check if OPENAI_API_KEY env is set, otherwise return error verify_images fi diff --git a/scripts/github_workflows.sh b/scripts/github_workflows.sh index e633ca1..a732c55 100755 --- a/scripts/github_workflows.sh +++ b/scripts/github_workflows.sh @@ -21,6 +21,7 @@ function mark { } function exit_trap() { + # shellcheck disable=SC2317 # Don't warn about unreachable commands in this function if [ $? != 0 ]; then echo "Command [$BASH_COMMAND] is failed" exit 1 @@ -40,9 +41,9 @@ function install_prerequisites () { # TODO@fujitatomoya: should install openai via package.xml apt install -y pip if [ $UBUNTU_VERSION == "24.04" ]; then - pip install openai --break-system-packages + pip install openai ollama --break-system-packages else - pip install openai + pip install openai ollama fi #apt install -y ros-${ROS_DISTRO}-desktop --no-install-recommends cd $there @@ -62,6 +63,7 @@ function build_colcon_package () { source /opt/ros/${ROS_DISTRO}/setup.bash cd ${COLCON_WORKSPACE} # TODO@fujitatomoya: extend this with `colcon test`. + # Once ollama is supported, we can integrate `verification.sh` via github workflow as 1st. 
colcon build --symlink-install --packages-select ros2ai } diff --git a/scripts/verification.sh b/scripts/verification.sh index ab20177..e653765 100755 --- a/scripts/verification.sh +++ b/scripts/verification.sh @@ -29,7 +29,7 @@ command_list=( "ros2 ai query -h" "ros2 ai query \"say hello\"" "ros2 ai query \"say hello\" -nv" - "ros2 ai query \"say hello\" -m gpt-3.5-turbo -u https://api.openai.com/v1 -t 100" + "ros2 ai query \"say hello\" -m gpt-4 -u https://api.openai.com/v1 -t 100" "ros2 ai exec \"give me all topics\"" "ros2 ai exec \"give me all topics\" --dry-run" "ros2 ai exec \"give me all topics\" -d" @@ -40,6 +40,7 @@ command_list=( ######################## function exit_trap() { + # shellcheck disable=SC2317 # Don't warn about unreachable commands in this function if [ $? != 0 ]; then echo "Command [$BASH_COMMAND] is failed" exit 1 @@ -49,10 +50,8 @@ function exit_trap() { function check_user_setting () { trap exit_trap ERR echo "[${FUNCNAME[0]}]: checking user setting and configuration." - # check if API key is set if [ -z "$OPENAI_API_KEY" ]; then - echo "OPENAI_API_KEY is not set." - exit 1 + echo "OPENAI_API_KEY is not set, if accessing OpenAI it will fail to call API." fi # check if ros2 envirnoment setting (trap function can catch the error) ros2