Skip to content

Commit 9cf55f6

Browse files
committed
WIP: emit all events on debug
1 parent 4c913d1 commit 9cf55f6

File tree

3 files changed

+76
-29
lines changed

3 files changed

+76
-29
lines changed

pyproject.toml

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-llamaindex"
3-
version = "0.0.28"
3+
version = "0.0.29"
44
description = "UiPath LlamaIndex SDK"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.10"
@@ -9,7 +9,7 @@ dependencies = [
99
"llama-index-embeddings-azure-openai>=0.3.8",
1010
"llama-index-llms-azure-openai>=0.3.2",
1111
"openinference-instrumentation-llama-index>=4.3.0",
12-
"uipath>=2.0.71, <2.1.0",
12+
"uipath>=3.0.0.dev1004330509,<3.0.0.dev1004340000"
1313
]
1414
classifiers = [
1515
"Development Status :: 3 - Alpha",
@@ -91,3 +91,5 @@ url = "https://test.pypi.org/simple/"
9191
publish-url = "https://test.pypi.org/legacy/"
9292
explicit = true
9393

94+
[tool.uv.sources]
95+
uipath = { index = "testpypi" }

src/uipath_llamaindex/_cli/_runtime/_runtime.py

Lines changed: 67 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -2,14 +2,15 @@
22
import logging
33
import os
44
import pickle
5+
from cgitb import handler
56
from contextlib import suppress
67
from typing import Optional, cast
78

89
from llama_index.core.workflow import (
910
Context,
1011
HumanResponseEvent,
1112
InputRequiredEvent,
12-
JsonPickleSerializer,
13+
JsonPickleSerializer, StartEvent,
1314
)
1415
from llama_index.core.workflow.errors import WorkflowTimeoutError
1516
from llama_index.core.workflow.handler import WorkflowHandler
@@ -87,30 +88,12 @@ async def execute(self) -> Optional[UiPathRuntimeResult]:
8788
if self.context.workflow_context is None:
8889
return None
8990

90-
handler: WorkflowHandler = self.context.workflow.run(
91-
start_event=ev if self.context.resume else None,
92-
ctx=self.context.workflow_context,
93-
**(self.context.input_json or {}),
94-
)
95-
9691
resume_trigger: Optional[UiPathResumeTrigger] = None
9792

98-
response_applied = False
99-
async for event in handler.stream_events():
100-
# log the received event on trace level
101-
if isinstance(event, InputRequiredEvent):
102-
# for api trigger hitl scenarios only pass the str input for processing
103-
hitl_processor = HitlProcessor(value=event.prefix)
104-
if self.context.resume and not response_applied:
105-
# If we are resuming, we need to apply the response to the event stream.
106-
response_applied = True
107-
response_event = await self.get_response_event()
108-
if response_event:
109-
# If we have a response event, send it to the workflow context.
110-
self.context.workflow_context.send_event(response_event)
111-
else:
112-
resume_trigger = await hitl_processor.create_resume_trigger()
113-
break
93+
if self.context.debug and os.getenv("VSCODE_EXPLORER_EXECUTION") == "True":
94+
handler = await self.debug_workflow()
95+
else:
96+
handler, resume_trigger = await self.run_workflow(ev)
11497

11598
if resume_trigger is None:
11699
try:
@@ -179,6 +162,67 @@ async def execute(self) -> Optional[UiPathRuntimeResult]:
179162
finally:
180163
self.trace_provider.shutdown()
181164

async def debug_workflow(self) -> WorkflowHandler:
    """Run the workflow step-by-step for VS Code debugging.

    Each step's produced events are dumped to the extension's temp debug
    file (when configured via VSCODE_EXTENSION_TEMP_DEBUG_FILE) and a
    debugpy breakpoint is triggered so the debugger can inspect state
    before the event is sent back into the workflow context.

    Returns:
        The WorkflowHandler after the stepwise run has finished.

    Raises:
        ImportError: if the optional ``debugpy`` dependency is missing.
    """
    import json  # local import: only needed on the debug path

    try:
        import debugpy
    except ImportError as exc:
        # Chain the cause so the missing-dependency origin stays visible.
        raise ImportError("DebugPy not available") from exc

    handler: WorkflowHandler = self.context.workflow.run(
        stepwise=True,
        **(self.context.input_json or {}),
    )
    # In debug mode, surface every event produced by every step.
    while produced_events := await handler.run_step():
        for ev in produced_events:
            debug_file_path = os.environ.get("VSCODE_EXTENSION_TEMP_DEBUG_FILE")
            if debug_file_path:
                # Snapshot the current event and handler state for the
                # extension to read while paused at the breakpoint.
                debug_data = {
                    "event": ev,
                    "variables": {
                        "current_event": ev,
                        # NOTE(review): assumes handler.ctx may lack `state`
                        # on some llama-index versions — confirm.
                        "handler_state": getattr(handler.ctx, "state", None),
                    },
                }
                with open(debug_file_path, "w") as f:
                    # Events are arbitrary objects; fall back to repr()
                    # so json.dump cannot raise TypeError mid-debug.
                    json.dump(debug_data, f, default=repr)

            # Pause here so the attached debugger can inspect `ev`.
            debugpy.breakpoint()

            # Re-emit the event so the workflow continues normally.
            handler.ctx.send_event(ev)
    return handler
async def run_workflow(
    self, start_event: Optional[StartEvent] = None
) -> tuple[WorkflowHandler, Optional[UiPathResumeTrigger]]:
    """Run the workflow, handling human-in-the-loop (HITL) interrupts.

    Args:
        start_event: Event instance to resume from; it is only forwarded
            to the workflow when ``self.context.resume`` is set.

    Returns:
        A tuple of the workflow handler and, when the run was interrupted
        while awaiting external input, the resume trigger (else ``None``).
    """
    resume_trigger: Optional[UiPathResumeTrigger] = None

    handler: WorkflowHandler = self.context.workflow.run(
        start_event=start_event if self.context.resume else None,
        ctx=self.context.workflow_context,
        **(self.context.input_json or {}),
    )

    response_applied = False
    async for event in handler.stream_events():
        # log the received event on trace level
        if isinstance(event, InputRequiredEvent):
            # for api trigger hitl scenarios only pass the str input for processing
            hitl_processor = HitlProcessor(value=event.prefix)
            if self.context.resume and not response_applied:
                # If we are resuming, we need to apply the response to the event stream.
                response_applied = True
                response_event = await self.get_response_event()
                if response_event:
                    # If we have a response event, send it to the workflow context.
                    self.context.workflow_context.send_event(response_event)
            else:
                # Not resuming (or response already applied): suspend the
                # run and hand back a trigger so it can be resumed later.
                resume_trigger = await hitl_processor.create_resume_trigger()
                break

    return handler, resume_trigger
182226
async def validate(self) -> None:
183227
"""Validate runtime inputs and load Llama agent configuration."""
184228
try:

src/uipath_llamaindex/_cli/cli_run.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,18 +6,18 @@
66
from dotenv import load_dotenv
77
from uipath._cli._runtime._contracts import UiPathTraceContext
88
from uipath._cli.middlewares import MiddlewareResult
9-
109
from ._runtime._context import UiPathLlamaIndexRuntimeContext
1110
from ._runtime._exception import UiPathLlamaIndexRuntimeError
1211
from ._runtime._runtime import UiPathLlamaIndexRuntime
1312
from ._utils._config import LlamaIndexConfig
13+
from uipath._cli._utils._common import serialize_object
1414

1515
logger = logging.getLogger(__name__)
1616
load_dotenv()
1717

1818

1919
def llamaindex_run_middleware(
20-
entrypoint: Optional[str], input: Optional[str], resume: bool
20+
entrypoint: Optional[str], input: Optional[str], resume: bool, **kwargs
2121
) -> MiddlewareResult:
2222
"""Middleware to handle LlamaIndex agent execution"""
2323

@@ -37,6 +37,7 @@ async def execute():
3737
context.entrypoint = entrypoint
3838
context.input = input
3939
context.resume = resume
40+
context.debug = kwargs.get("debug", False)
4041
context.logs_min_level = env.get("LOG_LEVEL", "INFO")
4142
context.job_id = env.get("UIPATH_JOB_KEY")
4243
context.trace_id = env.get("UIPATH_TRACE_ID")
@@ -59,9 +60,9 @@ async def execute():
5960
async with UiPathLlamaIndexRuntime.from_context(context) as runtime:
6061
return await runtime.execute()
6162

62-
asyncio.run(execute())
63+
result = asyncio.run(execute())
6364

64-
return MiddlewareResult(should_continue=False, error_message=None)
65+
return MiddlewareResult(should_continue=False, error_message=None, output=serialize_object(result.output))
6566

6667
except UiPathLlamaIndexRuntimeError as e:
6768
return MiddlewareResult(

0 commit comments

Comments
 (0)