From 269c8d3ce0ffe30898ed139946a4e85e3988de4a Mon Sep 17 00:00:00 2001 From: Marek Mihok Date: Mon, 12 Feb 2024 12:54:34 +0100 Subject: [PATCH 01/16] feat: init #2250 --- ui/src/chatbot.tsx | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/ui/src/chatbot.tsx b/ui/src/chatbot.tsx index 83bd0b7f9a..6f67bf8582 100644 --- a/ui/src/chatbot.tsx +++ b/ui/src/chatbot.tsx @@ -88,6 +88,18 @@ interface ChatbotMessage { from_user: B } +/** + * Create a chat prompt suggestion. + * + * Chat prompt suggestions are displayed as buttons below the last response in chatbot component. + */ +export interface ChatPromptSuggestion { + /** An identifying name for this component. */ + name: Id + /** The text displayed for this suggestion. */ + label?: S +} + /** Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers. */ export interface Chatbot { /** An identifying name for this component. */ @@ -96,12 +108,14 @@ export interface Chatbot { data: Rec /** Chat input box placeholder. Use for prompt examples. */ placeholder?: S - /** The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'. */ + /** The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'. */ events?: S[] /** True to show a button to stop the text generation. Defaults to False. */ generating?: B /** The previous messages to show as the user scrolls up. */ prev_items?: ChatbotMessage[] + /** Clickable prompt suggestions shown below the last response. */ + prompt_suggestions?: ChatPromptSuggestion[] } const processData = (data: Rec) => unpack(data).map(({ content, from_user }) => ({ content, from_user })) @@ -212,6 +226,15 @@ export const XChatbot = (props: Chatbot) => { ))} + {props.prompt_suggestions ? <> + { + props.prompt_suggestions.map(({ name, label }) => { + return + {label} + + }) + } + : null}
{props.generating && From 06b006af5dc0d5170f2129c234da43c068f85ea9 Mon Sep 17 00:00:00 2001 From: Marek Mihok Date: Mon, 12 Feb 2024 21:42:17 +0100 Subject: [PATCH 02/16] feat: layout and handling #2250 --- py/h2o_lightwave/h2o_lightwave/types.py | 51 ++++++++++++++++++- py/h2o_lightwave/h2o_lightwave/ui.py | 23 ++++++++- py/h2o_wave/h2o_wave/types.py | 51 ++++++++++++++++++- py/h2o_wave/h2o_wave/ui.py | 23 ++++++++- r/R/ui.R | 24 ++++++++- .../resources/templates/wave-components.xml | 20 +++++++- .../vscode-extension/component-snippets.json | 18 ++++++- ui/src/chatbot.tsx | 39 +++++++++----- 8 files changed, 226 insertions(+), 23 deletions(-) diff --git a/py/h2o_lightwave/h2o_lightwave/types.py b/py/h2o_lightwave/h2o_lightwave/types.py index 8887cc6b1e..a00021a7c3 100644 --- a/py/h2o_lightwave/h2o_lightwave/types.py +++ b/py/h2o_lightwave/h2o_lightwave/types.py @@ -8245,6 +8245,45 @@ def load(__d: Dict) -> 'ChatCard': ) +class ChatPromptSuggestion: + """Create a chat prompt suggestion displayed as button below the last response in chatbot component. + """ + def __init__( + self, + name: str, + label: str, + ): + _guard_scalar('ChatPromptSuggestion.name', name, (str,), True, False, False) + _guard_scalar('ChatPromptSuggestion.label', label, (str,), False, False, False) + self.name = name + """An identifying name for this component.""" + self.label = label + """The text displayed for this suggestion.""" + + def dump(self) -> Dict: + """Returns the contents of this object as a dict.""" + _guard_scalar('ChatPromptSuggestion.name', self.name, (str,), True, False, False) + _guard_scalar('ChatPromptSuggestion.label', self.label, (str,), False, False, False) + return _dump( + name=self.name, + label=self.label, + ) + + @staticmethod + def load(__d: Dict) -> 'ChatPromptSuggestion': + """Creates an instance of this class using the contents of a dict.""" + __d_name: Any = __d.get('name') + _guard_scalar('ChatPromptSuggestion.name', __d_name, (str,), True, False, False) + __d_label: Any = __d.get('label') + _guard_scalar('ChatPromptSuggestion.label', __d_label, (str,), False, False, False) + name: str = __d_name + label: str = __d_label + return ChatPromptSuggestion( + name, + label, + ) + + class ChatbotCard: """Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers. """ @@ -8256,6 +8295,7 @@ def __init__( placeholder: Optional[str] = None, events: Optional[List[str]] = None, generating: Optional[bool] = None, + prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None, commands: Optional[List[Command]] = None, ): _guard_scalar('ChatbotCard.box', box, (str,), False, False, False) @@ -8263,6 +8303,7 @@ def __init__( _guard_scalar('ChatbotCard.placeholder', placeholder, (str,), False, True, False) _guard_vector('ChatbotCard.events', events, (str,), False, True, False) _guard_scalar('ChatbotCard.generating', generating, (bool,), False, True, False) + _guard_vector('ChatbotCard.prompt_suggestions', prompt_suggestions, (ChatPromptSuggestion,), False, True, False) _guard_vector('ChatbotCard.commands', commands, (Command,), False, True, False) self.box = box """A string indicating how to place this component on the page.""" @@ -8273,9 +8314,11 @@ def __init__( self.placeholder = placeholder """Chat input box placeholder. Use for prompt examples.""" self.events = events - """The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'.""" + """The events to capture on this chatbot. 
One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'.""" self.generating = generating """True to show a button to stop the text generation. Defaults to False.""" + self.prompt_suggestions = prompt_suggestions + """Clickable prompt suggestions shown below the last response.""" self.commands = commands """Contextual menu commands for this component.""" @@ -8286,6 +8329,7 @@ def dump(self) -> Dict: _guard_scalar('ChatbotCard.placeholder', self.placeholder, (str,), False, True, False) _guard_vector('ChatbotCard.events', self.events, (str,), False, True, False) _guard_scalar('ChatbotCard.generating', self.generating, (bool,), False, True, False) + _guard_vector('ChatbotCard.prompt_suggestions', self.prompt_suggestions, (ChatPromptSuggestion,), False, True, False) _guard_vector('ChatbotCard.commands', self.commands, (Command,), False, True, False) return _dump( view='chatbot', @@ -8295,6 +8339,7 @@ def dump(self) -> Dict: placeholder=self.placeholder, events=self.events, generating=self.generating, + prompt_suggestions=None if self.prompt_suggestions is None else [__e.dump() for __e in self.prompt_suggestions], commands=None if self.commands is None else [__e.dump() for __e in self.commands], ) @@ -8312,6 +8357,8 @@ def load(__d: Dict) -> 'ChatbotCard': _guard_vector('ChatbotCard.events', __d_events, (str,), False, True, False) __d_generating: Any = __d.get('generating') _guard_scalar('ChatbotCard.generating', __d_generating, (bool,), False, True, False) + __d_prompt_suggestions: Any = __d.get('prompt_suggestions') + _guard_vector('ChatbotCard.prompt_suggestions', __d_prompt_suggestions, (dict,), False, True, False) __d_commands: Any = __d.get('commands') _guard_vector('ChatbotCard.commands', __d_commands, (dict,), False, True, False) box: str = __d_box @@ -8320,6 +8367,7 @@ def load(__d: Dict) -> 'ChatbotCard': placeholder: Optional[str] = __d_placeholder events: Optional[List[str]] = __d_events generating: Optional[bool] = __d_generating + prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None if __d_prompt_suggestions is None else [ChatPromptSuggestion.load(__e) for __e in __d_prompt_suggestions] commands: Optional[List[Command]] = None if __d_commands is None else [Command.load(__e) for __e in __d_commands] return ChatbotCard( box, @@ -8328,6 +8376,7 @@ def load(__d: Dict) -> 'ChatbotCard': placeholder, events, generating, + prompt_suggestions, commands, ) diff --git a/py/h2o_lightwave/h2o_lightwave/ui.py b/py/h2o_lightwave/h2o_lightwave/ui.py index 9967255f1d..a21320e6cd 100644 --- a/py/h2o_lightwave/h2o_lightwave/ui.py +++ b/py/h2o_lightwave/h2o_lightwave/ui.py @@ -2878,6 +2878,24 @@ def chat_card( ) +def chat_prompt_suggestion( + name: str, + label: str, +) -> ChatPromptSuggestion: + """Create a chat prompt suggestion displayed as button below the last response in chatbot component. + + Args: + name: An identifying name for this component. + label: The text displayed for this suggestion. + Returns: + A `h2o_wave.types.ChatPromptSuggestion` instance. + """ + return ChatPromptSuggestion( + name, + label, + ) + + def chatbot_card( box: str, name: str, @@ -2885,6 +2903,7 @@ def chatbot_card( placeholder: Optional[str] = None, events: Optional[List[str]] = None, generating: Optional[bool] = None, + prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None, commands: Optional[List[Command]] = None, ) -> ChatbotCard: """Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers. 
@@ -2894,8 +2913,9 @@ def chatbot_card( name: An identifying name for this component. data: Chat messages data. Requires cyclic buffer. placeholder: Chat input box placeholder. Use for prompt examples. - events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'. + events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'. generating: True to show a button to stop the text generation. Defaults to False. + prompt_suggestions: Clickable prompt suggestions shown below the last response. commands: Contextual menu commands for this component. Returns: A `h2o_wave.types.ChatbotCard` instance. @@ -2907,6 +2927,7 @@ def chatbot_card( placeholder, events, generating, + prompt_suggestions, commands, ) diff --git a/py/h2o_wave/h2o_wave/types.py b/py/h2o_wave/h2o_wave/types.py index 8887cc6b1e..a00021a7c3 100644 --- a/py/h2o_wave/h2o_wave/types.py +++ b/py/h2o_wave/h2o_wave/types.py @@ -8245,6 +8245,45 @@ def load(__d: Dict) -> 'ChatCard': ) +class ChatPromptSuggestion: + """Create a chat prompt suggestion displayed as button below the last response in chatbot component. + """ + def __init__( + self, + name: str, + label: str, + ): + _guard_scalar('ChatPromptSuggestion.name', name, (str,), True, False, False) + _guard_scalar('ChatPromptSuggestion.label', label, (str,), False, False, False) + self.name = name + """An identifying name for this component.""" + self.label = label + """The text displayed for this suggestion.""" + + def dump(self) -> Dict: + """Returns the contents of this object as a dict.""" + _guard_scalar('ChatPromptSuggestion.name', self.name, (str,), True, False, False) + _guard_scalar('ChatPromptSuggestion.label', self.label, (str,), False, False, False) + return _dump( + name=self.name, + label=self.label, + ) + + @staticmethod + def load(__d: Dict) -> 'ChatPromptSuggestion': + """Creates an instance of this class using the contents of a dict.""" + __d_name: Any = __d.get('name') + _guard_scalar('ChatPromptSuggestion.name', __d_name, (str,), True, False, False) + __d_label: Any = __d.get('label') + _guard_scalar('ChatPromptSuggestion.label', __d_label, (str,), False, False, False) + name: str = __d_name + label: str = __d_label + return ChatPromptSuggestion( + name, + label, + ) + + class ChatbotCard: """Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers. """ @@ -8256,6 +8295,7 @@ def __init__( placeholder: Optional[str] = None, events: Optional[List[str]] = None, generating: Optional[bool] = None, + prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None, commands: Optional[List[Command]] = None, ): _guard_scalar('ChatbotCard.box', box, (str,), False, False, False) @@ -8263,6 +8303,7 @@ def __init__( _guard_scalar('ChatbotCard.placeholder', placeholder, (str,), False, True, False) _guard_vector('ChatbotCard.events', events, (str,), False, True, False) _guard_scalar('ChatbotCard.generating', generating, (bool,), False, True, False) + _guard_vector('ChatbotCard.prompt_suggestions', prompt_suggestions, (ChatPromptSuggestion,), False, True, False) _guard_vector('ChatbotCard.commands', commands, (Command,), False, True, False) self.box = box """A string indicating how to place this component on the page.""" @@ -8273,9 +8314,11 @@ def __init__( self.placeholder = placeholder """Chat input box placeholder. Use for prompt examples.""" self.events = events - """The events to capture on this chatbot. 
One of 'stop' | 'scroll_up' | 'feedback'.""" + """The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'.""" self.generating = generating """True to show a button to stop the text generation. Defaults to False.""" + self.prompt_suggestions = prompt_suggestions + """Clickable prompt suggestions shown below the last response.""" self.commands = commands """Contextual menu commands for this component.""" @@ -8286,6 +8329,7 @@ def dump(self) -> Dict: _guard_scalar('ChatbotCard.placeholder', self.placeholder, (str,), False, True, False) _guard_vector('ChatbotCard.events', self.events, (str,), False, True, False) _guard_scalar('ChatbotCard.generating', self.generating, (bool,), False, True, False) + _guard_vector('ChatbotCard.prompt_suggestions', self.prompt_suggestions, (ChatPromptSuggestion,), False, True, False) _guard_vector('ChatbotCard.commands', self.commands, (Command,), False, True, False) return _dump( view='chatbot', @@ -8295,6 +8339,7 @@ def dump(self) -> Dict: placeholder=self.placeholder, events=self.events, generating=self.generating, + prompt_suggestions=None if self.prompt_suggestions is None else [__e.dump() for __e in self.prompt_suggestions], commands=None if self.commands is None else [__e.dump() for __e in self.commands], ) @@ -8312,6 +8357,8 @@ def load(__d: Dict) -> 'ChatbotCard': _guard_vector('ChatbotCard.events', __d_events, (str,), False, True, False) __d_generating: Any = __d.get('generating') _guard_scalar('ChatbotCard.generating', __d_generating, (bool,), False, True, False) + __d_prompt_suggestions: Any = __d.get('prompt_suggestions') + _guard_vector('ChatbotCard.prompt_suggestions', __d_prompt_suggestions, (dict,), False, True, False) __d_commands: Any = __d.get('commands') _guard_vector('ChatbotCard.commands', __d_commands, (dict,), False, True, False) box: str = __d_box @@ -8320,6 +8367,7 @@ def load(__d: Dict) -> 'ChatbotCard': placeholder: Optional[str] = __d_placeholder events: Optional[List[str]] = __d_events generating: Optional[bool] = __d_generating + prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None if __d_prompt_suggestions is None else [ChatPromptSuggestion.load(__e) for __e in __d_prompt_suggestions] commands: Optional[List[Command]] = None if __d_commands is None else [Command.load(__e) for __e in __d_commands] return ChatbotCard( box, @@ -8328,6 +8376,7 @@ def load(__d: Dict) -> 'ChatbotCard': placeholder, events, generating, + prompt_suggestions, commands, ) diff --git a/py/h2o_wave/h2o_wave/ui.py b/py/h2o_wave/h2o_wave/ui.py index 9967255f1d..a21320e6cd 100644 --- a/py/h2o_wave/h2o_wave/ui.py +++ b/py/h2o_wave/h2o_wave/ui.py @@ -2878,6 +2878,24 @@ def chat_card( ) +def chat_prompt_suggestion( + name: str, + label: str, +) -> ChatPromptSuggestion: + """Create a chat prompt suggestion displayed as button below the last response in chatbot component. + + Args: + name: An identifying name for this component. + label: The text displayed for this suggestion. + Returns: + A `h2o_wave.types.ChatPromptSuggestion` instance. + """ + return ChatPromptSuggestion( + name, + label, + ) + + def chatbot_card( box: str, name: str, @@ -2885,6 +2903,7 @@ def chatbot_card( placeholder: Optional[str] = None, events: Optional[List[str]] = None, generating: Optional[bool] = None, + prompt_suggestions: Optional[List[ChatPromptSuggestion]] = None, commands: Optional[List[Command]] = None, ) -> ChatbotCard: """Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers. 
@@ -2894,8 +2913,9 @@ def chatbot_card( name: An identifying name for this component. data: Chat messages data. Requires cyclic buffer. placeholder: Chat input box placeholder. Use for prompt examples. - events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'. + events: The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'. generating: True to show a button to stop the text generation. Defaults to False. + prompt_suggestions: Clickable prompt suggestions shown below the last response. commands: Contextual menu commands for this component. Returns: A `h2o_wave.types.ChatbotCard` instance. @@ -2907,6 +2927,7 @@ def chatbot_card( placeholder, events, generating, + prompt_suggestions, commands, ) diff --git a/r/R/ui.R b/r/R/ui.R index 4f603b4053..68a9df79ca 100644 --- a/r/R/ui.R +++ b/r/R/ui.R @@ -3339,14 +3339,33 @@ ui_chat_card <- function( return(.o) } +#' Create a chat prompt suggestion displayed as button below the last response in chatbot component. +#' +#' @param name An identifying name for this component. +#' @param label The text displayed for this suggestion. +#' @return A ChatPromptSuggestion instance. +#' @export +ui_chat_prompt_suggestion <- function( + name, + label) { + .guard_scalar("name", "character", name) + .guard_scalar("label", "character", label) + .o <- list( + name=name, + label=label) + class(.o) <- append(class(.o), c(.wave_obj, "WaveChatPromptSuggestion")) + return(.o) +} + #' Create a chatbot card to allow getting prompts from users and providing them with LLM generated answers. #' #' @param box A string indicating how to place this component on the page. #' @param name An identifying name for this component. #' @param data Chat messages data. Requires cyclic buffer. #' @param placeholder Chat input box placeholder. Use for prompt examples. -#' @param events The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback'. +#' @param events The events to capture on this chatbot. One of 'stop' | 'scroll_up' | 'feedback' | 'prompt_suggestion'. #' @param generating True to show a button to stop the text generation. Defaults to False. +#' @param prompt_suggestions Clickable prompt suggestions shown below the last response. #' @param commands Contextual menu commands for this component. #' @return A ChatbotCard instance. 
#' @export @@ -3357,6 +3376,7 @@ ui_chatbot_card <- function( placeholder = NULL, events = NULL, generating = NULL, + prompt_suggestions = NULL, commands = NULL) { .guard_scalar("box", "character", box) .guard_scalar("name", "character", name) @@ -3364,6 +3384,7 @@ ui_chatbot_card <- function( .guard_scalar("placeholder", "character", placeholder) .guard_vector("events", "character", events) .guard_scalar("generating", "logical", generating) + .guard_vector("prompt_suggestions", "WaveChatPromptSuggestion", prompt_suggestions) .guard_vector("commands", "WaveCommand", commands) .o <- list( box=box, @@ -3372,6 +3393,7 @@ ui_chatbot_card <- function( placeholder=placeholder, events=events, generating=generating, + prompt_suggestions=prompt_suggestions, commands=commands, view='chatbot') class(.o) <- append(class(.o), c(.wave_obj, "WaveChatbotCard")) diff --git a/tools/intellij-plugin/src/main/resources/templates/wave-components.xml b/tools/intellij-plugin/src/main/resources/templates/wave-components.xml index ae064bd8ab..f72a718546 100644 --- a/tools/intellij-plugin/src/main/resources/templates/wave-components.xml +++ b/tools/intellij-plugin/src/main/resources/templates/wave-components.xml @@ -95,6 +95,13 @@