Skip to content

Commit 3d87cec

Browse files
authored
Python: Fix SK migration samples (#5047)
* Fix SK migration samples * Fix env vars for SK * Hard code model for shell tool samples
1 parent 628bb1a commit 3d87cec

6 files changed

Lines changed: 27 additions & 10 deletions

File tree

.github/workflows/python-sample-validation.yml

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -610,14 +610,19 @@ jobs:
610610
env:
611611
FOUNDRY_PROJECT_ENDPOINT: ${{ vars.FOUNDRY_PROJECT_ENDPOINT || vars.AZURE_AI_PROJECT_ENDPOINT }}
612612
FOUNDRY_MODEL: ${{ vars.FOUNDRY_MODEL || vars.AZUREOPENAI__RESPONSESDEPLOYMENTNAME }}
613-
# Azure OpenAI configuration
613+
# Azure OpenAI configuration for AF
614614
AZURE_OPENAI_ENDPOINT: ${{ vars.AZUREOPENAI__ENDPOINT }}
615615
AZURE_OPENAI_MODEL: ${{ vars.AZURE_OPENAI_DEPLOYMENT_NAME || vars.AZUREOPENAI__RESPONSESDEPLOYMENTNAME }}
616-
# OpenAI configuration
616+
# Azure OpenAI configuration for SK
617+
AZURE_OPENAI_CHAT_DEPLOYMENT_NAME: ${{ vars.AZURE_OPENAI_DEPLOYMENT_NAME }}
618+
# OpenAI key
617619
OPENAI_API_KEY: ${{ secrets.OPENAI__APIKEY }}
620+
# OpenAI configuration for AF
618621
OPENAI_CHAT_MODEL: ${{ vars.OPENAI__CHATMODELID }}
619622
OPENAI_RESPONSES_MODEL: ${{ vars.OPENAI__RESPONSESMODELID }}
620623
OPENAI_MODEL: ${{ vars.OPENAI__RESPONSESMODELID }}
624+
# OpenAI configuration for SK
625+
OPENAI_CHAT_MODEL_ID: ${{ vars.OPENAI__CHATMODELID }}
621626
# Copilot Studio
622627
COPILOTSTUDIOAGENT__ENVIRONMENTID: ${{ secrets.COPILOTSTUDIOAGENT__ENVIRONMENTID }}
623628
COPILOTSTUDIOAGENT__SCHEMANAME: ${{ secrets.COPILOTSTUDIOAGENT__SCHEMANAME }}

python/samples/02-agents/providers/openai/client_with_local_shell.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,9 @@
1818
that wraps Python's subprocess module. Unlike the hosted shell tool (get_shell_tool()),
1919
local shell execution runs commands on YOUR machine, not in a remote container.
2020
21+
Currently not all models support the shell tool. Refer to the OpenAI documentation for the
22+
list of supported models: https://developers.openai.com/api/docs/models/
23+
2124
SECURITY NOTE: This example executes real commands on your local machine.
2225
Only enable this when you trust the agent's actions. Consider implementing
2326
allowlists, sandboxing, or approval workflows for production use.
@@ -53,7 +56,10 @@ async def main() -> None:
5356
print("=== OpenAI Agent with Local Shell Tool Example ===")
5457
print("NOTE: Commands will execute on your local machine.\n")
5558

56-
client = OpenAIChatClient()
59+
# Currently not all models support the shell tool. Refer to the OpenAI
60+
# documentation for the list of supported models:
61+
# https://developers.openai.com/api/docs/models/
62+
client = OpenAIChatClient(model="gpt-5.4-nano")
5763
local_shell_tool = client.get_shell_tool(
5864
func=run_bash,
5965
)

python/samples/02-agents/providers/openai/client_with_shell.py

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,14 +17,20 @@
1717
1818
The shell tool allows the model to run commands like listing files, running scripts,
1919
or performing system operations within a secure, sandboxed container.
20+
21+
Currently not all models support the shell tool. Refer to the OpenAI documentation
22+
for the list of supported models: https://developers.openai.com/api/docs/models/
2023
"""
2124

2225

2326
async def main() -> None:
2427
"""Example showing how to use the shell tool with OpenAI Chat."""
2528
print("=== OpenAI Chat Client Agent with Shell Tool Example ===")
2629

27-
client = OpenAIChatClient()
30+
# Currently not all models support the shell tool. Refer to the OpenAI
31+
# documentation for the list of supported models:
32+
# https://developers.openai.com/api/docs/models/
33+
client = OpenAIChatClient(model="gpt-5.4-nano")
2834

2935
# Create a hosted shell tool with the default auto container environment
3036
shell_tool = client.get_shell_tool()

python/samples/semantic-kernel-migration/openai_responses/01_basic_responses_agent.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -23,12 +23,12 @@ async def run_semantic_kernel() -> None:
2323
from semantic_kernel.connectors.ai.open_ai import OpenAISettings
2424

2525
openai_settings = OpenAISettings()
26-
assert openai_settings.responses_model is not None, "Responses model ID must be set in OpenAISettings"
26+
assert openai_settings.responses_model_id is not None, "Responses model ID must be set in OpenAISettings"
2727

2828
client = OpenAIResponsesAgent.create_client()
2929
# SK response agents wrap OpenAI's hosted Responses API.
3030
agent = OpenAIResponsesAgent(
31-
ai_model=openai_settings.responses_model,
31+
ai_model_id=openai_settings.responses_model_id,
3232
client=client,
3333
instructions="Answer in one concise sentence.",
3434
name="Expert",

python/samples/semantic-kernel-migration/openai_responses/02_responses_agent_with_tool.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,12 +29,12 @@ def add(self, a: float, b: float) -> float:
2929
return a + b
3030

3131
openai_settings = OpenAISettings()
32-
assert openai_settings.responses_model is not None, "Responses model ID must be set in OpenAISettings"
32+
assert openai_settings.responses_model_id is not None, "Responses model ID must be set in OpenAISettings"
3333

3434
client = OpenAIResponsesAgent.create_client()
3535
# Plugins advertise callable tools to the Responses agent.
3636
agent = OpenAIResponsesAgent(
37-
ai_model=openai_settings.responses_model,
37+
ai_model_id=openai_settings.responses_model_id,
3838
client=client,
3939
instructions="Use the add tool when math is required.",
4040
name="MathExpert",

python/samples/semantic-kernel-migration/openai_responses/03_responses_agent_structured_output.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -30,12 +30,12 @@ async def run_semantic_kernel() -> None:
3030
from semantic_kernel.connectors.ai.open_ai import OpenAISettings
3131

3232
openai_settings = OpenAISettings()
33-
assert openai_settings.responses_model is not None, "Responses model ID must be set in OpenAISettings"
33+
assert openai_settings.responses_model_id is not None, "Responses model ID must be set in OpenAISettings"
3434

3535
client = OpenAIResponsesAgent.create_client()
3636
# response_format requests schema-constrained output from the model.
3737
agent = OpenAIResponsesAgent(
38-
ai_model=openai_settings.responses_model,
38+
ai_model_id=openai_settings.responses_model_id,
3939
client=client,
4040
instructions="Return launch briefs as structured JSON.",
4141
name="ProductMarketer",

0 commit comments

Comments (0)