Skip to content

Commit

Permalink
Merge pull request #79 from ruanrongman/main
Browse files Browse the repository at this point in the history
Fix IoT tool ModuleNotFoundError
  • Loading branch information
Undertone0809 authored Sep 20, 2023
2 parents fedd72b + ec45017 commit 108e078
Show file tree
Hide file tree
Showing 5 changed files with 28 additions and 12 deletions.
1 change: 1 addition & 0 deletions promptulate/client/chat.py
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
HumanFeedBackTool
)
from promptulate.tools.shell import ShellTool

from promptulate.utils import set_proxy_mode, print_text

MODEL_MAPPING = {"OpenAI": ChatOpenAI, "ErnieBot": ErnieBot}
Expand Down
1 change: 1 addition & 0 deletions promptulate/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,7 @@ def __init__(self):
self.ernie_bot_url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions"
self.ernie_bot_turbo_url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant"
self.ernie_bot_token_url = "https://aip.baidubce.com/oauth/2.0/token"
self.ernie_embedding_v1_url = "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/embeddings/embedding-v1"
self.key_default_retry_times = 5
"""If llm(like OpenAI) unable to obtain data, retry request until the data is obtained."""
self.enable_stdout_hook = True
Expand Down
5 changes: 4 additions & 1 deletion promptulate/tools/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,8 @@
SemanticScholarReferenceTool,
SemanticScholarCitationTool,
)
from promptulate.tools.shell import ShellTool

from promptulate.tools.sleep import SleepTool

__all__ = [
Expand All @@ -33,10 +35,11 @@
"ArxivQueryTool",
"SemanticScholarReferenceTool",
"SemanticScholarCitationTool",
"IotSwitchTool",
"SemanticScholarQueryTool",
"PythonREPLTool",
"ShellTool",
"Calculator",
"SleepTool",
"IotSwitchTool",
"HumanFeedBackTool",
]
6 changes: 2 additions & 4 deletions promptulate/tools/iot_swith_mqtt/api_wrapper.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,5 @@
class IotSwitchAPIWrapper:
    """Thin wrapper around an MQTT client used by ``IotSwitchTool``.

    Deliberately import-free: the module-level ``import paho.mqtt.client``
    was removed so that importing this module never raises
    ``ModuleNotFoundError`` when paho-mqtt is not installed. The caller
    supplies an already-connected client object.
    """

    def run(self, client, topic: str, command: str) -> str:
        """Publish *command* to *topic* via *client* and report success.

        Args:
            client: an MQTT client exposing ``publish(topic, payload)``
                (presumably a ``paho.mqtt.client.Client`` — untyped here
                so paho-mqtt stays an optional dependency; TODO confirm).
            topic: MQTT topic string to publish to.
            command: payload to send to the device.

        Returns:
            The literal string ``"ok"``. NOTE(review): the publish result
            is not inspected, so "ok" does not guarantee delivery.
        """
        client.publish(topic, command)
        return "ok"
27 changes: 20 additions & 7 deletions promptulate/tools/iot_swith_mqtt/tools.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
import re
from typing import Dict, List

import paho.mqtt.client as mqtt

from promptulate.llms import ChatOpenAI, BaseLLM
from promptulate.tools import Tool
from promptulate.tools.iot_swith_mqtt.api_wrapper import IotSwitchAPIWrapper
Expand All @@ -13,7 +11,7 @@


class IotSwitchTool(Tool):
"""A tool for running python code in a REPL."""
"""A tool for switching operations on various devices"""

name: str = "Iot_Switch_Mqtt"
description: str = (
Expand All @@ -23,17 +21,25 @@ class IotSwitchTool(Tool):
"If the operation of the device is successful, an OK will be returned, otherwise a failure will be returned."
)
llm_prompt_template: StringTemplate = prompt_template
client: mqtt.Client
rule_table: List[Dict]
api_wrapper: IotSwitchAPIWrapper = IotSwitchAPIWrapper()

def __init__(
self,
client,
llm: BaseLLM = None,
client: mqtt.Client = None,
rule_table: List[Dict] = None,
api_wrapper: IotSwitchAPIWrapper = IotSwitchAPIWrapper(),
**kwargs
):
"""
Args:
llm: BaseLLM
client: mqtt.Client
rule_table: List[Dict]
api_wrapper: IotSwitchAPIWrapper
**kwargs
"""
self.api_wrapper = api_wrapper
self.llm: BaseLLM = llm or ChatOpenAI(
temperature=0.1, enable_preset_description=False
)
Expand All @@ -43,6 +49,14 @@ def __init__(
super().__init__(**kwargs)

def _run(self, question: str, *args, **kwargs) -> str:
try:
import paho.mqtt.client as mqtt
except ImportError:
raise ImportError(
"Could not import paho python package. "
"This is needed in order to for IotSwitchTool. "
"Please install it with `pip install paho-mqtt`."
)
if len(self.rule_table) == 0:
raise Exception("rule_table is empty")
else:
Expand All @@ -52,7 +66,6 @@ def _run(self, question: str, *args, **kwargs) -> str:
key = key + str(index) + "." + s["content"] + "\n"
index = index + 1
prompt = self.llm_prompt_template.format(question=question, rule_key=key)
logger.debug(prompt)
llm_output = self.llm(prompt)
return self._process_llm_result(llm_output)

Expand Down

0 comments on commit 108e078

Please sign in to comment.