diff --git a/docs/english/_sidebar.json b/docs/english/_sidebar.json index eab9d94f8..79721bdcd 100644 --- a/docs/english/_sidebar.json +++ b/docs/english/_sidebar.json @@ -7,7 +7,19 @@ }, "tools/bolt-python/getting-started", { "type": "html", "value": "
" }, - "tools/bolt-python/building-an-app", + "tools/bolt-python/creating-an-app", + { + "type": "category", + "label": "AI & Agents", + "link": { + "type": "doc", + "id": "tools/bolt-python/concepts/adding-agent-features" + }, + "items": [ + "tools/bolt-python/concepts/adding-agent-features", + "tools/bolt-python/concepts/using-the-assistant-class" + ] + }, { "type": "category", "label": "Slack API calls", @@ -39,7 +51,6 @@ "tools/bolt-python/concepts/app-home" ] }, - "tools/bolt-python/concepts/ai-apps", { "type": "category", "label": "Custom Steps", @@ -85,11 +96,7 @@ "tools/bolt-python/concepts/token-rotation" ] }, - { - "type": "category", - "label": "Experiments", - "items": ["tools/bolt-python/experiments"] - }, + "tools/bolt-python/experiments", { "type": "category", "label": "Legacy", diff --git a/docs/english/concepts/adding-agent-features.md b/docs/english/concepts/adding-agent-features.md new file mode 100644 index 000000000..cbd164630 --- /dev/null +++ b/docs/english/concepts/adding-agent-features.md @@ -0,0 +1,746 @@ +--- +sidebar_label: Adding agent features +--- + +# Adding agent features with Bolt for Python + +:::tip[Check out the Support Agent sample app] +The code snippets throughout this guide are from our [Support Agent sample app](https://github.com/slack-samples/bolt-python-support-agent), Casey, which supports integration with Pydantic, Anthropic, and OpenAI. + +View our [agent quickstart](/ai/agent-quickstart) to get up and running with Casey. Otherwise, read on for exploration and explanation of agent-focused Bolt features found within Casey. +::: + +Your agent can utilize features applicable to messages throughout Slack, like [chat streaming](#text-streaming) and [feedback buttons](#adding-and-handling-feedback). They can also [utilize the `Assistant` class](/tools/bolt-python/concepts/assistant-class) for a side-panel view designed with AI in mind. 
If you're unfamiliar with using these features within Slack, you may want to read the [API docs on the subject](/ai/). Then come back here to implement them with Bolt!
+ + + + +```python +import re +from logging import Logger + +from agents import Runner +from slack_bolt import BoltContext, Say, SayStream, SetStatus +from slack_sdk import WebClient + +from agent import CaseyDeps, casey_agent +from thread_context import conversation_store +from listeners.views.feedback_builder import build_feedback_blocks + + +def handle_app_mentioned( + client: WebClient, + context: BoltContext, + event: dict, + logger: Logger, + say: Say, + say_stream: SayStream, + set_status: SetStatus, +): + """Handle @Casey mentions in channels.""" + try: + channel_id = context.channel_id + text = event.get("text", "") + thread_ts = event.get("thread_ts") or event["ts"] + user_id = context.user_id + + # Strip the bot mention from the text + cleaned_text = re.sub(r"<@[A-Z0-9]+>", "", text).strip() + + if not cleaned_text: + say( + text="Hey there! How can I help you? Describe your IT issue and I'll do my best to assist.", + thread_ts=thread_ts, + ) + return + + # Add eyes reaction only to the first message (not threaded replies) + if not event.get("thread_ts"): + client.reactions_add( + channel=channel_id, + timestamp=event["ts"], + name="eyes", + ) + ... +``` + + + + +```python +from logging import Logger + +from slack_bolt.context import BoltContext +from slack_bolt.context.say import Say +from slack_bolt.context.say_stream import SayStream +from slack_bolt.context.set_status import SetStatus +from slack_sdk import WebClient + +from agent import CaseyDeps, run_casey_agent +from thread_context import session_store +from listeners.views.feedback_builder import build_feedback_blocks + + +def handle_message( + client: WebClient, + context: BoltContext, + event: dict, + logger: Logger, + say: Say, + say_stream: SayStream, + set_status: SetStatus, +): + """Handle messages sent to Casey via DM or in threads the bot is part of.""" + # Issue submissions are posted by the bot with metadata so the message + # handler can run the agent on behalf of the original user. 
        # Add eyes reaction only to the first message (DMs only — channel
        # threads already have the reaction from the initial app_mention)
        if is_dm and not existing_session_id:
            client.reactions_add(
                channel=channel_id,
                timestamp=event["ts"],
                name="eyes",
            )
+::: + + +```py +from logging import Logger + +from slack_bolt.context.set_suggested_prompts import SetSuggestedPrompts + +SUGGESTED_PROMPTS = [ + {"title": "Reset Password", "message": "I need to reset my password"}, + {"title": "Request Access", "message": "I need access to a system or tool"}, + {"title": "Network Issues", "message": "I'm having network connectivity issues"}, +] + + +def handle_assistant_thread_started( + set_suggested_prompts: SetSuggestedPrompts, logger: Logger +): + """Handle assistant thread started events by setting suggested prompts.""" + try: + set_suggested_prompts( + prompts=SUGGESTED_PROMPTS, + title="How can I help you today?", + ) + except Exception as e: + logger.exception(f"Failed to handle assistant thread started: {e}") +``` + + + + +--- + +## Setting status {#setting-assistant-status} + +Your app can show actions are happening behind the scenes by setting its thread status. + +```python +def handle_app_mentioned( + set_status: SetStatus, + ... +): + set_status( + status="Thinking...", + loading_messages=[ + "Teaching the hamsters to type faster…", + "Untangling the internet cables…", + "Consulting the office goldfish…", + "Polishing up the response just for you…", + "Convincing the AI to stop overthinking…", + ], + ) +``` + +--- + +## Streaming messages {#text-streaming} + +You can have your app's messages stream in to replicate conventional agent behavior. Bolt for Python provides a `say_stream` utility as a listener argument available for `app.event` and `app.message` listeners. + +The `say_stream` utility streamlines calling the Python Slack SDK's [`WebClient.chat_stream`](https://docs.slack.dev/tools/python-slack-sdk/reference/web/client.html#slack_sdk.web.client.WebClient.chat_stream) helper utility by sourcing parameter values from the relevant event payload. + +| Parameter | Value | +|---|---| +| `channel_id` | Sourced from the event payload. +| `thread_ts` | Sourced from the event payload. 
If neither a `channel_id` nor a `thread_ts` can be sourced, then the utility will be `None`.
```python title=".../listeners/actions/feedback_button.py"
+Full example + + + +```python title="app_mentioned.py" +import re +from logging import Logger + +from slack_bolt import BoltContext, Say, SayStream, SetStatus +from slack_sdk import WebClient + +from agent import CaseyDeps, casey_agent, get_model +from thread_context import conversation_store +from listeners.views.feedback_builder import build_feedback_blocks + + +def handle_app_mentioned( + client: WebClient, + context: BoltContext, + event: dict, + logger: Logger, + say: Say, + say_stream: SayStream, + set_status: SetStatus, +): + """Handle @Casey mentions in channels.""" + try: + channel_id = context.channel_id + text = event.get("text", "") + thread_ts = event.get("thread_ts") or event["ts"] + user_id = context.user_id + + # Strip the bot mention from the text + cleaned_text = re.sub(r"<@[A-Z0-9]+>", "", text).strip() + + if not cleaned_text: + say( + text="Hey there! How can I help you? Describe your IT issue and I'll do my best to assist.", + thread_ts=thread_ts, + ) + return + + # Add eyes reaction only to the first message (not threaded replies) + if not event.get("thread_ts"): + client.reactions_add( + channel=channel_id, + timestamp=event["ts"], + name="eyes", + ) + + # Set assistant thread status with loading messages + set_status( + status="Thinking...", + loading_messages=[ + "Teaching the hamsters to type faster…", + "Untangling the internet cables…", + "Consulting the office goldfish…", + "Polishing up the response just for you…", + "Convincing the AI to stop overthinking…", + ], + ) + + # Get conversation history + history = conversation_store.get_history(channel_id, thread_ts) + + # Run the agent + deps = CaseyDeps( + client=client, + user_id=user_id, + channel_id=channel_id, + thread_ts=thread_ts, + message_ts=event["ts"], + ) + result = casey_agent.run_sync( + cleaned_text, + model=get_model(), + deps=deps, + message_history=history, + ) + + # Stream response in thread with feedback buttons + streamer = say_stream() + 
streamer.append(markdown_text=result.output) + feedback_blocks = build_feedback_blocks() + streamer.stop(blocks=feedback_blocks) + + # Store conversation history + conversation_store.set_history(channel_id, thread_ts, result.all_messages()) + + except Exception as e: + logger.exception(f"Failed to handle app mention: {e}") + say( + text=f":warning: Something went wrong! ({e})", + thread_ts=event.get("thread_ts") or event["ts"], + ) +``` + + + + +```python title="app_mentioned.py" +import re +from logging import Logger + +from slack_bolt.context import BoltContext +from slack_bolt.context.say import Say +from slack_bolt.context.say_stream import SayStream +from slack_bolt.context.set_status import SetStatus +from slack_sdk import WebClient + +from agent import CaseyDeps, run_casey_agent +from thread_context import session_store +from listeners.views.feedback_builder import build_feedback_blocks + + +def handle_app_mentioned( + client: WebClient, + context: BoltContext, + event: dict, + logger: Logger, + say: Say, + say_stream: SayStream, + set_status: SetStatus, +): + """Handle @Casey mentions in channels.""" + try: + channel_id = context.channel_id + text = event.get("text", "") + thread_ts = event.get("thread_ts") or event["ts"] + + # Strip the bot mention from the text + cleaned_text = re.sub(r"<@[A-Z0-9]+>", "", text).strip() + + if not cleaned_text: + say( + text="Hey there! How can I help you? 
    except Exception as e:
        logger.exception(f"Failed to handle app mention: {e}")
        say(
            text=f":warning: Something went wrong! ({e})",
            thread_ts=event.get("thread_ts") or event["ts"],
        )
+ result = Runner.run_sync(casey_agent, input=input_items, context=deps) + + # Stream response in thread with feedback buttons + streamer = say_stream() + streamer.append(markdown_text=result.final_output) + feedback_blocks = build_feedback_blocks() + streamer.stop(blocks=feedback_blocks) + + # Store conversation history + conversation_store.set_history(channel_id, thread_ts, result.to_input_list()) + + except Exception as e: + logger.exception(f"Failed to handle app mention: {e}") + say( + text=f":warning: Something went wrong! ({e})", + thread_ts=event.get("thread_ts") or event["ts"], + ) +``` + + + +
+ +--- + +## Onward: adding custom tools + +Casey comes with test tools and simulated systems. You can extend it with custom tools to make it a fully functioning Slack agent. + +In this example, we'll add a tool that makes live calls to check the GitHub status. + +1. Create `agent/tools/{tool-name}.py` and define the tool with the `@tool` decorator: + +```python title="agent/tools/check_github_status.py" +from claude_agent_sdk import tool +import httpx + +@tool( + name="check_github_status", + description="Check GitHub's current operational status", + input_schema={}, +) +async def check_github_status_tool(args): + """Check if GitHub is operational.""" + async with httpx.AsyncClient() as client: + response = await client.get("https://www.githubstatus.com/api/v2/status.json") + data = response.json() + status = data["status"]["indicator"] + description = data["status"]["description"] + + return { + "content": [ + { + "type": "text", + "text": f"**GitHub Status** — {status}\n{description}", + } + ] + } +``` + +2. Import the tool in `agent/casey.py`: + +```python title="agent/casey.py" +from agent.tools import check_github_status_tool +``` + +3. Register in `casey_tools_server`: + +```python title="agent/casey.py" +casey_tools_server = create_sdk_mcp_server( + name="casey-tools", + version="1.0.0", + tools=[ + check_github_status_tool, # Add here + # ... other tools + ], +) +``` + +4. Add to `CASEY_TOOLS`: + +```python title="agent/casey.py" +CASEY_TOOLS = [ + "check_github_status", # Add here + # ... other tools +] +``` + +Use this example as a jumping off point for building out an agent with the capabilities you need! 
\ No newline at end of file diff --git a/docs/english/concepts/message-sending.md b/docs/english/concepts/message-sending.md index 87c433129..090503ff2 100644 --- a/docs/english/concepts/message-sending.md +++ b/docs/english/concepts/message-sending.md @@ -43,37 +43,58 @@ def show_datepicker(event, say): ## Streaming messages {#streaming-messages} -You can have your app's messages stream in to replicate conventional AI chatbot behavior. This is done through three Web API methods: +You can have your app's messages stream in to replicate conventional agent behavior. Bolt for Python provides a `say_stream` utility as a listener argument available for `app.event` and `app.message` listeners. -* [`chat_startStream`](/reference/methods/chat.startStream) -* [`chat_appendStream`](/reference/methods/chat.appendStream) -* [`chat_stopStream`](/reference/methods/chat.stopStream) +The `say_stream` utility streamlines calling the Python Slack SDK's [`WebClient.chat_stream`](https://docs.slack.dev/tools/python-slack-sdk/reference/web/client.html#slack_sdk.web.client.WebClient.chat_stream) helper utility by sourcing parameter values from the relevant event payload. -The Python Slack SDK provides a [`chat_stream()`](https://docs.slack.dev/tools/python-slack-sdk/reference/web/client.html#slack_sdk.web.client.WebClient.chat_stream) helper utility to streamline calling these methods. Here's an excerpt from our [Assistant template app](https://github.com/slack-samples/bolt-python-assistant-template): +| Parameter | Value | +|---|---| +| `channel_id` | Sourced from the event payload. +| `thread_ts` | Sourced from the event payload. Falls back to the `ts` value if available. +| `recipient_team_id` | Sourced from the event `team_id` (`enterprise_id` if the app is installed on an org). +| `recipient_user_id` | Sourced from the `user_id` of the event. 
-```python -streamer = client.chat_stream( - channel=channel_id, - recipient_team_id=team_id, - recipient_user_id=user_id, - thread_ts=thread_ts, -) - -# Loop over OpenAI response stream -# https://platform.openai.com/docs/api-reference/responses/create -for event in returned_message: - if event.type == "response.output_text.delta": - streamer.append(markdown_text=f"{event.delta}") - else: - continue - -feedback_block = create_feedback_block() -streamer.stop(blocks=feedback_block) +If neither a `channel_id` or `thread_ts` can be sourced, then the utility will be `None`. + +For information on calling the `chat_*Stream` API methods directly, see the [_Sending streaming messages_](/tools/python-slack-sdk/web#sending-streaming-messages) section of the Python Slack SDK docs. + +### Example {#example} + +```py +import os + +from slack_bolt import App, SayStream +from slack_bolt.adapter.socket_mode import SocketModeHandler +from slack_sdk import WebClient + +app = App(token=os.environ.get("SLACK_BOT_TOKEN")) + +@app.event("app_mention") +def handle_app_mention(client: WebClient, say_stream: SayStream): + stream = say_stream() + stream.append(markdown_text="Someone rang the bat signal!") + stream.stop() + +@app.message("") +def handle_message(client: WebClient, say_stream: SayStream): + stream = say_stream() + + stream.append(markdown_text="Let me consult my *vast knowledge database*...) + stream.stop() + +if __name__ == "__main__": + SocketModeHandler(app, os.environ.get("SLACK_APP_TOKEN")).start() ``` -In that example, a [feedback buttons](/reference/block-kit/block-elements/feedback-buttons-element) block element is passed to `streamer.stop` to provide feedback buttons to the user at the bottom of the message. Interaction with these buttons will send a block action event to your app to receive the feedback. 
+#### Adding feedback buttons after a stream -```python +You can pass a [feedback buttons](/reference/block-kit/block-elements/feedback-buttons-element) block element to `stream.stop` to provide feedback buttons to the user at the bottom of the message. Interaction with these buttons will send a block action event to your app to receive the feedback. + +```py +stream.stop(blocks=feedback_block) +``` + +```py def create_feedback_block() -> List[Block]: blocks: List[Block] = [ ContextActionsBlock( @@ -95,6 +116,4 @@ def create_feedback_block() -> List[Block]: ) ] return blocks -``` - -For information on calling the `chat_*Stream` API methods without the helper utility, see the [_Sending streaming messages_](/tools/python-slack-sdk/web#sending-streaming-messages) section of the Python Slack SDK docs. \ No newline at end of file +``` \ No newline at end of file diff --git a/docs/english/concepts/ai-apps.md b/docs/english/concepts/using-the-assistant-class.md similarity index 66% rename from docs/english/concepts/ai-apps.md rename to docs/english/concepts/using-the-assistant-class.md index 3b057bc7e..ed004dc35 100644 --- a/docs/english/concepts/ai-apps.md +++ b/docs/english/concepts/using-the-assistant-class.md @@ -1,17 +1,10 @@ - -# Using AI in Apps {#using-ai-in-apps} - -The Slack platform offers features tailored for AI agents and assistants. Your apps can [utilize the `Assistant` class](#assistant) for a side-panel view designed with AI in mind, or they can utilize features applicable to messages throughout Slack, like [chat streaming](#text-streaming) and [feedback buttons](#adding-and-handling-feedback). - -If you're unfamiliar with using these feature within Slack, you may want to read the [API documentation on the subject](/ai/). Then come back here to implement them with Bolt! 
- -## The `Assistant` class instance {#assistant} +# Using the Assistant class :::info[Some features within this guide require a paid plan] If you don't have a paid workspace for development, you can join the [Developer Program](https://api.slack.com/developer-program) and provision a sandbox with access to all Slack features for free. ::: -The [`Assistant`](/tools/bolt-js/reference#the-assistantconfig-configuration-object) class can be used to handle the incoming events expected from a user interacting with an app in Slack that has the Agents & AI Apps feature enabled. +The `Assistant` class can be used to handle the incoming events expected from a user interacting with an app in Slack that has the Agents & AI Apps feature enabled. A typical flow would look like: @@ -63,7 +56,7 @@ If you do provide your own `threadContextStore` property, it must feature `get` :::tip[Refer to the [reference docs](https://docs.slack.dev/tools/bolt-python/reference/kwargs_injection/args.html) to learn the available listener arguments.] ::: -### Configuring your app to support the `Assistant` class {#configuring-assistant-class} +## Configuring your app to support the `Assistant` class {#configuring-assistant-class} 1. Within [App Settings](https://api.slack.com/apps), enable the **Agents & AI Apps** feature. @@ -77,7 +70,7 @@ If you do provide your own `threadContextStore` property, it must feature `get` * [`assistant_thread_context_changed`](/reference/events/assistant_thread_context_changed) * [`message.im`](/reference/events/message.im) -### Handling a new thread {#handling-new-thread} +## Handling a new thread {#handling-new-thread} When the user opens a new thread with your AI-enabled app, the [`assistant_thread_started`](/reference/events/assistant_thread_started) event will be sent to your app. @@ -122,7 +115,7 @@ def start_assistant_thread( You can send more complex messages to the user — see [Sending Block Kit alongside messages](#block-kit-interactions) for more info. 
-### Handling thread context changes {#handling-thread-context-changes} +## Handling thread context changes {#handling-thread-context-changes} When the user switches channels, the [`assistant_thread_context_changed`](/reference/events/assistant_thread_context_changed) event will be sent to your app. @@ -137,7 +130,7 @@ from slack_bolt import FileAssistantThreadContextStore assistant = Assistant(thread_context_store=FileAssistantThreadContextStore()) ``` -### Handling the user response {#handling-user-response} +## Handling the user response {#handling-user-response} When the user messages your app, the [`message.im`](/reference/events/message.im) event will be sent to your app. @@ -205,7 +198,7 @@ def respond_in_assistant_thread( app.use(assistant) ``` -### Sending Block Kit alongside messages {#block-kit-interactions} +## Sending Block Kit alongside messages {#block-kit-interactions} For advanced use cases, Block Kit buttons may be used instead of suggested prompts, as well as the sending of messages with structured [metadata](/messaging/message-metadata/) to trigger subsequent interactions with the user. @@ -331,182 +324,6 @@ def respond_to_bot_messages(logger: logging.Logger, set_status: SetStatus, say: ... ``` -See the [_Adding and handling feedback_](#adding-and-handling-feedback) section for adding feedback buttons with Block Kit. - -## Text streaming in messages {#text-streaming} - -Three Web API methods work together to provide users a text streaming experience: - -* the [`chat.startStream`](/reference/methods/chat.startStream) method starts the text stream, -* the [`chat.appendStream`](/reference/methods/chat.appendStream) method appends text to the stream, and -* the [`chat.stopStream`](/reference/methods/chat.stopStream) method stops it. 
- -Since you're using Bolt for Python, built upon the Python Slack SDK, you can use the [`chat_stream()`](https://docs.slack.dev/tools/python-slack-sdk/reference/web/client.html#slack_sdk.web.client.WebClient.chat_stream) utility to streamline all three aspects of streaming in your app's messages. - -The following example uses OpenAI's streaming API with the new `chat_stream()` functionality, but you can substitute it with the AI client of your choice. - - -```python -import os -from typing import List, Dict - -import openai -from openai import Stream -from openai.types.responses import ResponseStreamEvent - -DEFAULT_SYSTEM_CONTENT = """ -You're an assistant in a Slack workspace. -Users in the workspace will ask you to help them write something or to think better about a specific topic. -You'll respond to those questions in a professional way. -When you include markdown text, convert them to Slack compatible ones. -When a prompt has Slack's special syntax like <@USER_ID> or <#CHANNEL_ID>, you must keep them as-is in your response. -""" - -def call_llm( - messages_in_thread: List[Dict[str, str]], - system_content: str = DEFAULT_SYSTEM_CONTENT, -) -> Stream[ResponseStreamEvent]: - openai_client = openai.OpenAI(api_key=os.getenv("OPENAI_API_KEY")) - messages = [{"role": "system", "content": system_content}] - messages.extend(messages_in_thread) - response = openai_client.responses.create(model="gpt-4o-mini", input=messages, stream=True) - return response - -@assistant.user_message -def respond_in_assistant_thread( - ... -): - try: - ... 
- replies = client.conversations_replies( - channel=context.channel_id, - ts=context.thread_ts, - oldest=context.thread_ts, - limit=10, - ) - messages_in_thread: List[Dict[str, str]] = [] - for message in replies["messages"]: - role = "user" if message.get("bot_id") is None else "assistant" - messages_in_thread.append({"role": role, "content": message["text"]}) - - returned_message = call_llm(messages_in_thread) - - streamer = client.chat_stream( - channel=channel_id, - recipient_team_id=team_id, - recipient_user_id=user_id, - thread_ts=thread_ts, - ) - - # Loop over OpenAI response stream - # https://platform.openai.com/docs/api-reference/responses/create - for event in returned_message: - if event.type == "response.output_text.delta": - streamer.append(markdown_text=f"{event.delta}") - else: - continue - - streamer.stop() - - except Exception as e: - logger.exception(f"Failed to handle a user message event: {e}") - say(f":warning: Something went wrong! ({e})") -``` - -## Adding and handling feedback {#adding-and-handling-feedback} - -Use the [feedback buttons block element](/reference/block-kit/block-elements/feedback-buttons-element/) to allow users to immediately provide feedback regarding your app's responses. 
Here's a quick example: - -```py -from typing import List -from slack_sdk.models.blocks import Block, ContextActionsBlock, FeedbackButtonsElement, FeedbackButtonObject - - -def create_feedback_block() -> List[Block]: - """ - Create feedback block with thumbs up/down buttons - - Returns: - Block Kit context_actions block - """ - blocks: List[Block] = [ - ContextActionsBlock( - elements=[ - FeedbackButtonsElement( - action_id="feedback", - positive_button=FeedbackButtonObject( - text="Good Response", - accessibility_label="Submit positive feedback on this response", - value="good-feedback", - ), - negative_button=FeedbackButtonObject( - text="Bad Response", - accessibility_label="Submit negative feedback on this response", - value="bad-feedback", - ), - ) - ] - ) - ] - return blocks -``` - -Use the `chat_stream` utility to render the feedback block at the bottom of your app's message. - -```js -... - streamer = client.chat_stream( - channel=channel_id, - recipient_team_id=team_id, - recipient_user_id=user_id, - thread_ts=thread_ts, - ) - - # Loop over OpenAI response stream - # https://platform.openai.com/docs/api-reference/responses/create - for event in returned_message: - if event.type == "response.output_text.delta": - streamer.append(markdown_text=f"{event.delta}") - else: - continue - - feedback_block = create_feedback_block() - streamer.stop(blocks=feedback_block) -... -``` - -Then add a response for when the user provides feedback. 
-
-```python
-# Handle feedback buttons (thumbs up/down)
-def handle_feedback(ack, body, client, logger: logging.Logger):
-    try:
-        ack()
-        message_ts = body["message"]["ts"]
-        channel_id = body["channel"]["id"]
-        feedback_type = body["actions"][0]["value"]
-        is_positive = feedback_type == "good-feedback"
-
-        if is_positive:
-            client.chat_postEphemeral(
-                channel=channel_id,
-                user=body["user"]["id"],
-                thread_ts=message_ts,
-                text="We're glad you found this useful.",
-            )
-        else:
-            client.chat_postEphemeral(
-                channel=channel_id,
-                user=body["user"]["id"],
-                thread_ts=message_ts,
-                text="Sorry to hear that response wasn't up to par :slightly_frowning_face: Starting a new chat may help with AI mistakes and hallucinations.",
-            )
-
-        logger.debug(f"Handled feedback: type={feedback_type}, message_ts={message_ts}")
-    except Exception as error:
-        logger.error(f":warning: Something went wrong! {error}")
-```
-
-## Full example: App Agent Template {#app-agent-template}
+See the [_Creating agents: adding and handling feedback_](/tools/bolt-python/concepts/adding-agent-features#adding-and-handling-feedback) section for adding feedback buttons with Block Kit.

-Want to see the functionality described throughout this guide in action? We've created a [App Agent Template](https://github.com/slack-samples/bolt-python-assistant-template) repo for you to build off of.
+Want to see the functionality described throughout this guide in action? We've created an [App Agent Template](https://github.com/slack-samples/bolt-python-assistant-template) repo for you to build from.
\ No newline at end of file diff --git a/docs/english/building-an-app.md b/docs/english/creating-an-app.md similarity index 99% rename from docs/english/building-an-app.md rename to docs/english/creating-an-app.md index bde340961..7f06e9d42 100644 --- a/docs/english/building-an-app.md +++ b/docs/english/creating-an-app.md @@ -1,8 +1,8 @@ --- -sidebar_label: Building an App +sidebar_label: Creating an app --- -# Building an App with Bolt for Python +# Creating an app with Bolt for Python This guide is meant to walk you through getting up and running with a Slack app using Bolt for Python. Along the way, we’ll create a new Slack app, set up your local environment, and develop an app that listens and responds to messages from a Slack workspace. @@ -10,7 +10,7 @@ When you're finished, you'll have created the [Getting Started app](https://gith --- -### Create an app {#create-an-app} +### Create a new app {#create-an-app} First thing's first: before you start developing with Bolt, you'll want to [create a Slack app](https://api.slack.com/apps/new). :::tip[A place to test and learn] diff --git a/docs/english/experiments.md b/docs/english/experiments.md index 681c8cbc6..13adf0a32 100644 --- a/docs/english/experiments.md +++ b/docs/english/experiments.md @@ -28,7 +28,3 @@ def handle_mention(agent: BoltAgent): stream.append(markdown_text="Hello!") stream.stop() ``` - -### Limitations - -The `chat_stream()` method currently only works when the `thread_ts` field is available in the event context (DMs and threaded replies). Top-level channel messages do not have a `thread_ts` field, and the `ts` field is not yet provided to `BoltAgent`. 
\ No newline at end of file diff --git a/docs/english/getting-started.md b/docs/english/getting-started.md index 934dd3bae..6964df23b 100644 --- a/docs/english/getting-started.md +++ b/docs/english/getting-started.md @@ -279,55 +279,10 @@ This will open the following page: On these pages you're free to make changes such as updating your app icon, configuring app features, and perhaps even distributing your app! -## Adding AI features {#ai-features} - -Now that you're familiar with a basic app setup, try it out again, this time using the AI agent template! - - - - -Get started with the agent template: - -```sh -$ slack create ai-app --template slack-samples/bolt-python-assistant-template -$ cd ai-app -``` - - - - -Get started with the agent template: - -```sh -$ git clone https://github.com/slack-samples/bolt-python-assistant-template ai-app -$ cd ai-app -``` - -Using this method, be sure to set the app and bot tokens as we did in the [Running the app](#running-the-app) section above. - - - - -Once the project is created, update the `.env.sample` file by setting the `OPENAI_API_KEY` with the value of your key and removing the `.sample` from the file name. - -In the `ai` folder of this app, you'll find default instructions for the LLM and an OpenAI client setup. - -The `listeners` include utilities intended for messaging with an LLM. Those are outlined in detail in the guide to [Using AI in apps](/tools/bolt-python/concepts/ai-apps) and [Sending messages](/tools/bolt-python/concepts/message-sending). - ## Next steps {#next-steps} -Congrats once more on getting up and running with this quick start. - -:::info[Dive deeper] - -Follow along with the steps that went into making this app on the [building an app](/tools/bolt-python/building-an-app) guide for an educational overview. - -::: - You can now continue customizing your app with various features to make it right for whatever job's at hand. 
Here are some ideas about what to explore next: -- Explore the different events your bot can listen to with the [`app.event()`](/tools/bolt-python/concepts/event-listening) method. See the full events reference [here](/reference/events). -- Bolt allows you to call [Web API](/tools/bolt-python/concepts/web-api) methods with the client attached to your app. There are [over 200 methods](/reference/methods) available. -- Learn more about the different [token types](/authentication/tokens) and [authentication setups](/tools/bolt-python/concepts/authenticating-oauth). Your app might need different tokens depending on the actions you want to perform or for installations to multiple workspaces. -- Receive events using HTTP for various deployment methods, such as deploying to Heroku or AWS Lambda. -- Read on [app design](/surfaces/app-design) and compose fancy messages with blocks using [Block Kit Builder](https://app.slack.com/block-kit-builder) to prototype messages. +- Follow along with the steps that went into making this app on the [creating an app](/tools/bolt-python/creating-an-app) guide for an educational overview. +- Check out the [Agent quickstart](/ai/agent-quickstart) to get up and running with an agent. +- Browse our [curated catalog of samples](/samples) for more apps to use as a starting point for development. \ No newline at end of file diff --git a/docs/english/tutorial/ai-chatbot/ai-chatbot.md b/docs/english/tutorial/ai-chatbot/ai-chatbot.md index 72005f817..2fcc16e9a 100644 --- a/docs/english/tutorial/ai-chatbot/ai-chatbot.md +++ b/docs/english/tutorial/ai-chatbot/ai-chatbot.md @@ -1,64 +1,72 @@ # AI Chatbot -In this tutorial, you'll learn how to bring the power of AI into your Slack workspace using a chatbot called Bolty that uses Anthropic or OpenAI. Here's what we'll do with this sample app: - -1. Create your app from an app manifest and clone a starter template -2. Set up and run your local project -3. 
Create a workflow using Workflow Builder to summarize messages in conversations
-4. Select your preferred API and model to customize Bolty's responses
-5. Interact with Bolty via direct message, the `/ask-bolty` slash command, or by mentioning the app in conversations
+In this tutorial, you'll learn how to bring the power of AI into your Slack workspace using a chatbot called Bolty that uses Anthropic or OpenAI.
+
+With Bolty, users can:
+
+- send direct messages to Bolty and get AI-powered responses,
+- use the `/ask-bolty` slash command to ask Bolty questions, and
+- receive channel summaries when joining new channels.
+
+Intrigued? First, grab your tools by following the three steps below.
+
+import QuickstartGuide from '@site/src/components/QuickstartGuide';
+
+<QuickstartGuide />
+
+
## Prerequisites {#prereqs} -Before getting started, you will need the following: +You will also need the following: -- a development workspace where you have permissions to install apps. If you don’t have a workspace, go ahead and set that up now — you can [go here](https://slack.com/get-started#create) to create one, or you can join the [Developer Program](https://api.slack.com/developer-program) and provision a sandbox with access to all Slack features for free. +- a development workspace where you have permissions to install apps. If you don’t have a workspace you can join the [Developer Program](https://api.slack.com/developer-program) and provision a sandbox with access to all Slack features for free. - a development environment with [Python 3.7](https://www.python.org/downloads/) or later. - an Anthropic or OpenAI account with sufficient credits, and in which you have generated a secret key. -**Skip to the code** -If you'd rather skip the tutorial and just head straight to the code, you can use our [Bolt for Python AI Chatbot sample](https://github.com/slack-samples/bolt-python-ai-chatbot) as a template. - -## Creating your app {#create-app} - -1. Navigate to the [app creation page](https://api.slack.com/apps/new) and select **From a manifest**. -2. Select the workspace you want to install the application in. -3. Copy the contents of the [`manifest.json`](https://github.com/slack-samples/bolt-python-ai-chatbot/blob/main/manifest.json) file into the text box that says **Paste your manifest code here** (within the **JSON** tab) and click **Next**. -4. Review the configuration and click **Create**. -5. You're now in your app configuration's **Basic Information** page. Navigate to the **Install App** link in the left nav and click **Install to Workspace**, then **Allow** on the screen that follows. 
- ### Obtaining and storing your environment variables {#environment-variables} Before you'll be able to successfully run the app, you'll need to first obtain and set some environment variables. -#### Slack tokens {#slack-tokens} - -From your app's page on [app settings](https://api.slack.com/apps) collect an app and bot token: - -1. On the **Install App** page, copy your **Bot User OAuth Token**. You will store this in your environment as `SLACK_BOT_TOKEN` (we'll get to that next). -2. Navigate to **Basic Information** and in the **App-Level Tokens** section , click **Generate Token and Scopes**. Add the [`connections:write`](/reference/scopes/connections.write) scope, name the token, and click **Generate**. (For more details, refer to [understanding OAuth scopes for bots](/authentication/tokens#bot)). Copy this token. You will store this in your environment as `SLACK_APP_TOKEN`. - -To store your tokens and environment variables, run the following commands in the terminal. Replace the placeholder values with your bot and app tokens collected above: - -**For macOS** - -```bash -export SLACK_BOT_TOKEN= -export SLACK_APP_TOKEN= -``` - -**For Windows** - -```pwsh -set SLACK_BOT_TOKEN= -set SLACK_APP_TOKEN= -``` - #### Provider tokens {#provider-tokens} Models from different AI providers are available if the corresponding environment variable is added as shown in the sections below. 
-##### Anthropic {#anthropic} + + To interact with Anthropic models, navigate to your Anthropic account dashboard to [create an API key](https://console.anthropic.com/settings/keys), then export the key as follows: @@ -66,7 +74,8 @@ To interact with Anthropic models, navigate to your Anthropic account dashboard export ANTHROPIC_API_KEY= ``` -##### Google Cloud Vertex AI {#google-cloud-vertex-ai} + + To use Google Cloud Vertex AI, [follow this quick start](https://cloud.google.com/vertex-ai/generative-ai/docs/start/quickstarts/quickstart-multimodal#expandable-1) to create a project for sending requests to the Gemini API, then gather [Application Default Credentials](https://cloud.google.com/docs/authentication/provide-credentials-adc) with the strategy to match your development environment. @@ -79,7 +88,8 @@ export VERTEX_AI_LOCATION= The project location can be located under the **Region** on the [Vertex AI](https://console.cloud.google.com/vertex-ai) dashboard, as well as more details about available Gemini models. 
-##### OpenAI {#openai} + + Unlock the OpenAI models from your OpenAI account dashboard by clicking [create a new secret key](https://platform.openai.com/api-keys), then export the key like so: @@ -87,49 +97,46 @@ Unlock the OpenAI models from your OpenAI account dashboard by clicking [create export OPENAI_API_KEY= ``` -## Setting up and running your local project {#configure-project} - -Clone the starter template onto your machine by running the following command: - -```bash -git clone https://github.com/slack-samples/bolt-python-ai-chatbot.git -``` + + -Change into the new project directory: +## Setting up and running your local project {#configure-project} -```bash -cd bolt-python-ai-chatbot -``` Start your Python virtual environment: -**For macOS** + + ```bash python3 -m venv .venv source .venv/bin/activate ``` -**For Windows** + + ```bash py -m venv .venv .venv\Scripts\activate ``` + + + Install the required dependencies: ```bash pip install -r requirements.txt ``` -Start your local server: +Run your app locally: ```bash -python app.py +slack run ``` -If your app is up and running, you'll see a message that says "⚡️ Bolt app is running!" +If your app is indeed up and running, you'll see a message that says "⚡️ Bolt app is running!" ## Choosing your provider {#provider} @@ -235,5 +242,4 @@ You can also navigate to **Bolty** in your **Apps** list and select the **Messag Congratulations! You've successfully integrated the power of AI into your workspace. Check out these links to take the next steps in your Bolt for Python journey. - To learn more about Bolt for Python, refer to the [Getting started](/tools/bolt-python/getting-started) documentation. -- For more details about creating workflow steps using the Bolt SDK, refer to the [workflow steps for Bolt](/workflows/workflow-steps) guide. 
-- To use the Bolt for Python SDK to develop on the automations platform, refer to the [Create a workflow step for Workflow Builder: Bolt for Python](/tools/bolt-python/tutorial/custom-steps-workflow-builder-new) tutorial. +- For more details about creating workflow steps using the Bolt SDK, refer to the [workflow steps for Bolt](/workflows/workflow-steps) guide. \ No newline at end of file diff --git a/docs/japanese/concepts/assistant.md b/docs/japanese/concepts/assistant.md deleted file mode 100644 index 664108607..000000000 --- a/docs/japanese/concepts/assistant.md +++ /dev/null @@ -1,227 +0,0 @@ -# エージェント・アシスタント - -このページは、Bolt を使ってエージェント・アシスタントを実装するための方法を紹介します。この機能に関する一般的な情報については、[こちらのドキュメントページ(英語)](/ai/)を参照してください。 - -この機能を実装するためには、まず[アプリの設定画面](https://api.slack.com/apps)で **Agents & Assistants** 機能を有効にし、**OAuth & Permissions** のページで [`assistant:write`](/reference/scopes/assistant.write)、[chat:write](/reference/scopes/chat.write)、[`im:history`](/reference/scopes/im.history) を**ボットの**スコープに追加し、**Event Subscriptions** のページで [`assistant_thread_started`](/reference/events/assistant_thread_started)、[`assistant_thread_context_changed`](/reference/events/assistant_thread_context_changed)、[`message.im`](/reference/events/message.im) イベントを有効にしてください。 - -また、この機能は Slack の有料プランでのみ利用可能です。もし開発用の有料プランのワークスペースをお持ちでない場合は、[Developer Program](https://api.slack.com/developer-program) に参加し、全ての有料プラン向け機能を利用可能なサンドボックス環境をつくることができます。 - -ユーザーとのアシスタントスレッド内でのやりとりを処理するには、`assistant_thread_started`、`assistant_thread_context_changed`、`message` イベントの `app.event(...)` リスナーを使うことも可能ですが、Bolt はよりシンプルなアプローチを提供しています。`Assistant` インスタンスを作り、それに必要なイベントリスナーを追加し、最後にこのアシスタント設定を `App` インスタンスに渡すだけでよいのです。 - -```python -assistant = Assistant() - -# ユーザーがアシスタントスレッドを開いたときに呼び出されます -@assistant.thread_started -def start_assistant_thread(say: Say, set_suggested_prompts: SetSuggestedPrompts): - # ユーザーに対して最初の返信を送信します - say(":wave: Hi, how can I help you today?") - - # プロンプト例を送るのは必須ではありません - set_suggested_prompts( - 
prompts=[ - # もしプロンプトが長い場合は {"title": "表示する短いラベル", "message": "完全なプロンプト"} を使うことができます - "What does SLACK stand for?", - "When Slack was released?", - ], - ) - -# ユーザーがスレッド内で返信したときに呼び出されます -@assistant.user_message -def respond_in_assistant_thread( - payload: dict, - logger: logging.Logger, - context: BoltContext, - set_status: SetStatus, - say: Say, - client: WebClient, -): - try: - # ユーザーにこのbotがリクエストを受信して作業中であることを伝えます - set_status("is typing...") - - # 会話の履歴を取得します - replies_in_thread = client.conversations_replies( - channel=context.channel_id, - ts=context.thread_ts, - oldest=context.thread_ts, - limit=10, - ) - messages_in_thread: List[Dict[str, str]] = [] - for message in replies_in_thread["messages"]: - role = "user" if message.get("bot_id") is None else "assistant" - messages_in_thread.append({"role": role, "content": message["text"]}) - - # プロンプトと会話の履歴を LLM に渡します(この call_llm はあなた自身のコードです) - returned_message = call_llm(messages_in_thread) - - # 結果をアシスタントスレッドに送信します - say(text=returned_message) - - except Exception as e: - logger.exception(f"Failed to respond to an inquiry: {e}") - # エラーになった場合は必ずメッセージを送信するようにしてください - # そうしなかった場合、'is typing...' 
の表示のままになってしまい、ユーザーは会話を続けることができなくなります - say(f":warning: Sorry, something went wrong during processing your request (error: {e})") - -# このミドルウェアを Bolt アプリに追加します -app.use(assistant) -``` - -リスナーに指定可能な引数の一覧は[モジュールドキュメント](https://docs.slack.dev/tools/bolt-python/reference/kwargs_injection/args.html)を参考にしてください。 - -ユーザーがチャンネルの横でアシスタントスレッドを開いた場合、そのチャンネルの情報は、そのスレッドの `AssistantThreadContext` データとして保持され、 `get_thread_context` ユーティリティを使ってアクセスすることができます。Bolt がこのユーティリティを提供している理由は、後続のユーザーメッセージ投稿のイベントペイロードに最新のスレッドのコンテキスト情報は含まれないためです。そのため、アプリはコンテキスト情報が変更されたタイミングでそれを何らかの方法で保存し、後続のメッセージイベントのリスナーコードから参照できるようにする必要があります。 - -そのユーザーがチャンネルを切り替えた場合、`assistant_thread_context_changed` イベントがあなたのアプリに送信されます。(上記のコード例のように)組み込みの `Assistant` ミドルウェアをカスタム設定なしで利用している場合、この更新されたチャンネル情報は、自動的にこのアシスタントボットからの最初の返信のメッセージメタデータとして保存されます。これは、組み込みの仕組みを使う場合は、このコンテキスト情報を自前で用意したデータストアに保存する必要はないということです。この組み込みの仕組みの唯一の短所は、追加の Slack API 呼び出しによる処理時間のオーバーヘッドです。具体的には `get_thread_context` を実行したときに、この保存されたメッセージメタデータにアクセスするために `conversations.history` API が呼び出されます。 - -このデータを別の場所に保存したい場合、自前の `AssistantThreadContextStore` 実装を `Assistant` のコンストラクターに渡すことができます。リファレンス実装として、`FileAssistantThreadContextStore` というローカルファイルシステムを使って実装を提供しています: - -```python -# これはあくまで例であり、自前のものを渡すことができます -from slack_bolt import FileAssistantThreadContextStore -assistant = Assistant(thread_context_store=FileAssistantThreadContextStore()) -``` - -このリファレンス実装はローカルファイルに依存しており、本番環境での利用は推奨しません。本番アプリでは `AssistantThreadContextStore` を継承した自前のクラスを使うようにしてください。 - -最後に、動作する完全なサンプルコード例を確認したい場合は、私たちが GitHub 上で提供している[サンプルアプリのリポジトリ](https://github.com/slack-samples/bolt-python-assistant-template)をチェックしてみてください。 - -## アシスタントスレッドでの Block Kit インタラクション - -より高度なユースケースでは、上のようなプロンプト例の提案ではなく Block Kit のボタンなどを使いたいという場合があるかもしれません。そして、後続の処理のために[構造化されたメッセージメタデータ](/messaging/message-metadata/)を含むメッセージを送信したいという場合もあるでしょう。 - -例えば、アプリが最初の返信で「参照しているチャンネルを要約」のようなボタンを表示し、ユーザーがそれをクリックして、より詳細な情報(例:要約するメッセージ数・日数、要約の目的など)を送信、アプリがそれを構造化されたメータデータに整理した上でリクエスト内容をボットのメッセージとして送信するようなシナリオです。 - 
-デフォルトでは、アプリはそのアプリ自身から送信したボットメッセージに応答することはできません(Bolt にはあらかじめ無限ループを防止する制御が入っているため)。`ignoring_self_assistant_message_events_enabled=False` を `App` のコンストラクターに渡し、`bot_message` リスナーを `Assistant` ミドルウェアに追加すると、上記の例のようなリクエストを伝えるボットメッセージを使って処理を継続することができるようになります。 - -```python -app = App( - token=os.environ["SLACK_BOT_TOKEN"], - # bot message を受け取るには必ずこれを指定してください - ignoring_self_assistant_message_events_enabled=False, -) - -assistant = Assistant() - -# リスナーに指定可能な引数の一覧は https://docs.slack.dev/tools/bolt-python/reference/kwargs_injection/args.html を参照してください - -@assistant.thread_started -def start_assistant_thread(say: Say): - say( - text=":wave: Hi, how can I help you today?", - blocks=[ - { - "type": "section", - "text": {"type": "mrkdwn", "text": ":wave: Hi, how can I help you today?"}, - }, - { - "type": "actions", - "elements": [ - # 複数のボタンを配置することが可能です - { - "type": "button", - "action_id": "assistant-generate-random-numbers", - "text": {"type": "plain_text", "text": "Generate random numbers"}, - "value": "clicked", - }, - ], - }, - ], - ) - -# 上のボタンがクリックされたときに実行されます -@app.action("assistant-generate-random-numbers") -def configure_random_number_generation(ack: Ack, client: WebClient, body: dict): - ack() - client.views_open( - trigger_id=body["trigger_id"], - view={ - "type": "modal", - "callback_id": "configure_assistant_summarize_channel", - "title": {"type": "plain_text", "text": "My Assistant"}, - "submit": {"type": "plain_text", "text": "Submit"}, - "close": {"type": "plain_text", "text": "Cancel"}, - # アシスタントスレッドの情報を app.view リスナーに引き継ぎます - "private_metadata": json.dumps( - { - "channel_id": body["channel"]["id"], - "thread_ts": body["message"]["thread_ts"], - } - ), - "blocks": [ - { - "type": "input", - "block_id": "num", - "label": {"type": "plain_text", "text": "# of outputs"}, - # 自然言語のテキストではなく、あらかじめ決められた形式の入力を受け取ることができます - "element": { - "type": "static_select", - "action_id": "input", - "placeholder": {"type": "plain_text", "text": "How many numbers do you 
need?"}, - "options": [ - {"text": {"type": "plain_text", "text": "5"}, "value": "5"}, - {"text": {"type": "plain_text", "text": "10"}, "value": "10"}, - {"text": {"type": "plain_text", "text": "20"}, "value": "20"}, - ], - "initial_option": {"text": {"type": "plain_text", "text": "5"}, "value": "5"}, - }, - } - ], - }, - ) - -# 上のモーダルが送信されたときに実行されます -@app.view("configure_assistant_summarize_channel") -def receive_random_number_generation_details(ack: Ack, client: WebClient, payload: dict): - ack() - num = payload["state"]["values"]["num"]["input"]["selected_option"]["value"] - thread = json.loads(payload["private_metadata"]) - - # 構造化された入力情報とともにボットのメッセージを送信します - # 以下の assistant.bot_message リスナーが処理を継続します - # このリスナー内で処理したい場合はそれでも構いません! - # bot_message リスナーが必要ない場合は ignoring_self_assistant_message_events_enabled=False を設定する必要はありません - client.chat_postMessage( - channel=thread["channel_id"], - thread_ts=thread["thread_ts"], - text=f"OK, you need {num} numbers. I will generate it shortly!", - metadata={ - "event_type": "assistant-generate-random-numbers", - "event_payload": {"num": int(num)}, - }, - ) - -# このアプリのボットユーザーがメッセージを送信したときに実行されます -@assistant.bot_message -def respond_to_bot_messages(logger: logging.Logger, set_status: SetStatus, say: Say, payload: dict): - try: - if payload.get("metadata", {}).get("event_type") == "assistant-generate-random-numbers": - # 上の random-number-generation リクエストを処理します - set_status("is generating an array of random numbers...") - time.sleep(1) - nums: Set[str] = set() - num = payload["metadata"]["event_payload"]["num"] - while len(nums) < num: - nums.add(str(random.randint(1, 100))) - say(f"Here you are: {', '.join(nums)}") - else: - # それ以外のパターンでは何もしません - # さらに他のパターンを追加する場合、メッセージ送信の無限ループを起こさないよう注意して実装してください - pass - - except Exception as e: - logger.exception(f"Failed to respond to an inquiry: {e}") - -# ユーザーが返信したときに実行されます -@assistant.user_message -def respond_to_user_messages(logger: logging.Logger, set_status: SetStatus, say: Say): - 
try: - set_status("is typing...") - say("Please use the buttons in the first reply instead :bow:") - except Exception as e: - logger.exception(f"Failed to respond to an inquiry: {e}") - say(f":warning: Sorry, something went wrong during processing your request (error: {e})") - -# このミドルウェアを Bolt アプリに追加します -app.use(assistant) -``` \ No newline at end of file