Skip to content

Commit e05fe84

Browse files
authored
Bump packages to resolve CVEs (#87)
* uv.lock
* Upgrade packages
* Fix errors after upgrade
* Bump up GH workflow commit pins
1 parent 1bc49be commit e05fe84

4 files changed

Lines changed: 464 additions & 446 deletions

File tree

.github/workflows/lint.yml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,8 @@ jobs:
55
lint-stage:
66
runs-on: ubuntu-latest
77
steps:
8-
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8
9-
- uses: astral-sh/setup-uv@9cfd02964306b527feff5fee75acfd028cce4260
8+
- uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd
9+
- uses: astral-sh/setup-uv@fe3617d6e9d89d9b2ddd353e1b3c5d20d20c896f
1010
with:
1111
activate-environment: true
1212
- name: Verify uv.lock is up-to-date

pyproject.toml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,9 @@ select = [
9696
"RUF", # ruff-specific rules
9797
"UP", # pyupgrade
9898
]
99+
ignore = [
100+
"E501", # line-length
101+
]
99102

100103
[tool.ruff.lint.isort]
101104
combine-as-imports = true

splunklib/ai/engines/langchain.py

Lines changed: 12 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,10 @@
2727
ModelResponse as LC_ModelResponse,
2828
)
2929
from langchain.agents.middleware.summarization import TokenCounter as LC_TokenCounter
30-
from langchain.agents.middleware.types import ModelCallResult as LC_ModelCallResult
30+
from langchain.agents.middleware.types import (
31+
ExtendedModelResponse as LC_ExtendedModelResponse,
32+
ModelCallResult as LC_ModelCallResult,
33+
)
3134
from langchain.messages import (
3235
AIMessage as LC_AIMessage,
3336
AnyMessage as LC_AnyMessage,
@@ -70,10 +73,10 @@
7073
ToolMessage,
7174
)
7275
from splunklib.ai.middleware import (
73-
AgentMiddlewareHandler,
74-
AgentState,
7576
AgentMiddleware,
77+
AgentMiddlewareHandler,
7678
AgentRequest,
79+
AgentState,
7780
ModelMiddlewareHandler,
7881
ModelRequest,
7982
ModelResponse,
@@ -130,7 +133,7 @@ def __init__(
130133
model: BaseChatModel,
131134
tools: list[BaseTool],
132135
output_schema: type[OutputT] | None,
133-
lcmiddleware: Sequence[LC_AgentMiddleware] | None = None,
136+
lc_middleware: Sequence[LC_AgentMiddleware] | None = None,
134137
middleware: Sequence[AgentMiddleware] | None = None,
135138
) -> None:
136139
super().__init__()
@@ -147,7 +150,7 @@ def __init__(
147150
system_prompt=system_prompt,
148151
checkpointer=checkpointer,
149152
response_format=output_schema,
150-
middleware=lcmiddleware or [],
153+
middleware=lc_middleware or [],
151154
)
152155

153156
def _with_agent_middleware(
@@ -300,7 +303,7 @@ async def create_agent(
300303
model=model_impl,
301304
tools=tools,
302305
output_schema=agent.output_schema,
303-
lcmiddleware=middleware,
306+
lc_middleware=middleware,
304307
middleware=agent.middleware,
305308
)
306309

@@ -589,6 +592,9 @@ def _convert_tool_message_from_lc(
589592

590593

591594
def _convert_model_result_from_lc(model_response: LC_ModelCallResult) -> ModelResponse:
595+
if isinstance(model_response, LC_ExtendedModelResponse):
596+
model_response = model_response.model_response
597+
592598
if isinstance(model_response, LC_ModelResponse):
593599
ai_message = next(
594600
(m for m in model_response.result if isinstance(m, LC_AIMessage)), None

0 commit comments

Comments (0)