@@ -42,6 +42,13 @@ class JobOutput:
 UpdateID = str
 Workflow = Type
 
+
+@dataclass
+class Update:
+    id: UpdateID
+    arg: I
+
+
 _sdk_internals_pending_tasks_count = 0
 _sdk_internals_handler_mutex = asyncio.Lock()
 
@@ -54,11 +61,11 @@ def _sdk_internals_all_handlers_completed(self) -> bool:
 
 @asynccontextmanager
 async def _sdk_internals__track_pending__wait_until_ready__serialize_execution(
-    ready_condition: Callable[[], bool]
+    execute_condition: Callable[[], bool]
 ):
     global _sdk_internals_pending_tasks_count
     _sdk_internals_pending_tasks_count += 1
-    await workflow.wait_condition(ready_condition)
+    await workflow.wait_condition(execute_condition)
     await _sdk_internals_handler_mutex.acquire()
     try:
         yield
@@ -72,29 +79,30 @@ class SDKInternals:
     # pending tasks tracking, and synchronization functionality. This is a fake implementation: the
     # real implementation will automatically inspect and wrap the user's declared update handlers.
 
-    def ready_to_execute(self, arg: I) -> bool:
-        # Implemented by user
+    def ready_to_execute(self, update: Update) -> bool:
+        # Overridden by users who wish to control order of execution
         return True
 
     @workflow.update
     async def run_shell_script_job(self, arg: I) -> O:
         handler = getattr(self, "_" + inspect.currentframe().f_code.co_name)
         async with _sdk_internals__track_pending__wait_until_ready__serialize_execution(
-            lambda: self.ready_to_execute(arg)
+            lambda: self.ready_to_execute(Update(arg.id, arg))
         ):
             return await handler(arg)
 
     @workflow.update
     async def run_python_job(self, arg: I) -> O:
         handler = getattr(self, "_" + inspect.currentframe().f_code.co_name)
         async with _sdk_internals__track_pending__wait_until_ready__serialize_execution(
-            lambda: self.ready_to_execute(arg)
+            lambda: self.ready_to_execute(Update(arg.id, arg))
         ):
             return await handler(arg)
 
 
 # Monkey-patch proposed new public API
 setattr(workflow, "all_handlers_completed", _sdk_internals_all_handlers_completed)
+setattr(workflow, "Update", Update)
 ##
 ## END SDK internals prototype
 ##
@@ -115,12 +123,13 @@ async def run(self):
         await workflow.wait_condition(
             lambda: (
                 workflow.info().is_continue_as_new_suggested()
-                and self.all_handlers_completed()
+                and workflow.all_handlers_completed()
             )
         )
         workflow.continue_as_new()
 
-    def ready_to_execute(self, job: Job) -> bool:
+    def ready_to_execute(self, update: workflow.Update) -> bool:
+        job = update.arg
         if not set(job.depends_on) <= self.completed_tasks:
             return False
         if after_time := job.after_time:
@@ -129,45 +138,41 @@ def ready_to_execute(self, job: Job) -> bool:
         return True
 
     # These are the real handler functions. When we implement SDK support, these will use the
-    # @workflow.update decorator and will not use an underscore prefix.
+    # decorator form commented out below, and will not use an underscore prefix.
 
-    # @workflow.update
+    # @workflow.update(execute_condition=ready_to_execute)
     async def _run_shell_script_job(self, job: Job) -> JobOutput:
-        try:
-            if security_errors := await workflow.execute_activity(
-                run_shell_script_security_linter,
-                args=[job.run],
-                start_to_close_timeout=timedelta(seconds=10),
-            ):
-                return JobOutput(status=1, stdout="", stderr=security_errors)
-            job_output = await workflow.execute_activity(
-                run_job, args=[job], start_to_close_timeout=timedelta(seconds=10)
-            )
-            return job_output
-        finally:
-            # FIXME: unbounded memory usage
-            self.completed_tasks.add(job.id)
+        if security_errors := await workflow.execute_activity(
+            run_shell_script_security_linter,
+            args=[job.run],
+            start_to_close_timeout=timedelta(seconds=10),
+        ):
+            return JobOutput(status=1, stdout="", stderr=security_errors)
+        job_output = await workflow.execute_activity(
+            run_job, args=[job], start_to_close_timeout=timedelta(seconds=10)
+        )
+        # FIXME: unbounded memory usage
+        self.completed_tasks.add(job.id)
+        return job_output
 
-    # @workflow.update
+    # @workflow.update(execute_condition=ready_to_execute)
     async def _run_python_job(self, job: Job) -> JobOutput:
-        try:
-            if not await workflow.execute_activity(
-                check_python_interpreter_version,
-                args=[job.python_interpreter_version],
-                start_to_close_timeout=timedelta(seconds=10),
-            ):
-                return JobOutput(
-                    status=1,
-                    stdout="",
-                    stderr=f"Python interpreter version {job.python_interpreter_version} is not available",
-                )
-            job_output = await workflow.execute_activity(
-                run_job, args=[job], start_to_close_timeout=timedelta(seconds=10)
+        if not await workflow.execute_activity(
+            check_python_interpreter_version,
+            args=[job.python_interpreter_version],
+            start_to_close_timeout=timedelta(seconds=10),
+        ):
+            return JobOutput(
+                status=1,
+                stdout="",
+                stderr=f"Python interpreter version {job.python_interpreter_version} is not available",
             )
-            return job_output
-        finally:
-            # FIXME: unbounded memory usage
-            self.completed_tasks.add(job.id)
+        job_output = await workflow.execute_activity(
+            run_job, args=[job], start_to_close_timeout=timedelta(seconds=10)
+        )
+        # FIXME: unbounded memory usage
+        self.completed_tasks.add(job.id)
+        return job_output
 
 
 @activity.defn
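
For illustration only (not part of this change), a minimal sketch of how a handler might read once the SDK supports the execute_condition= form hinted at by the commented-out decorators above. The decorator keyword is the API proposed by this prototype, not a released Temporal SDK feature, and the JobRunner class name is an assumption since the workflow class declaration is outside these hunks:

# Illustrative sketch only; assumes the proposed SDK support exists.
# The handler would lose its underscore prefix and be registered directly with
# the execute_condition that the prototype currently wires up by hand.
@workflow.defn
class JobRunner:  # class name assumed; not shown in these hunks
    def ready_to_execute(self, update: workflow.Update) -> bool:
        # Same ordering policy as above: run a job only once its dependencies are done.
        job = update.arg
        return set(job.depends_on) <= self.completed_tasks

    @workflow.update(execute_condition=ready_to_execute)  # proposed API, not yet in the SDK
    async def run_python_job(self, job: Job) -> JobOutput:
        ...  # body identical to _run_python_job above

As in the prototype, the condition receives the whole workflow.Update (id plus argument), so an ordering policy can key off either field.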