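# Excerpts from Prefect's FlowRunner test suite. The imports and helpers below are
# assumed for these excerpts (they are not part of the original snippet) and follow
# the Prefect 0.x/1.x module layout; tests that take `self` belong to test classes
# in the original file.
import collections

import cloudpickle

import prefect
from prefect import Flow
from prefect.engine import signals
from prefect.engine.flow_runner import FlowRunner
from prefect.engine.result import NoResult, Result
from prefect.engine.signals import ENDRUN
from prefect.engine.state import Failed, Pending, Success
from prefect.utilities.debug import raise_on_exception

# Assumed test helpers (minimal sketches of what the original suite defines):
handler_results = collections.defaultdict(int)  # counts state-handler invocations


class SuccessTask(prefect.Task):
    def run(self):
        return 1


class ErrorTask(prefect.Task):
    def run(self):
        raise ValueError("custom-error-message")
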
def test_flow_handlers_are_called_even_when_initialize_run_fails(self):
    class BadRunner(FlowRunner):
        def initialize_run(self, *args, **kwargs):
            raise SyntaxError("bad")

    def handler(runner, old, new):
        handler_results["Flow"] += 1
        return new

    flow = Flow(name="test", state_handlers=[handler])
    BadRunner(flow=flow).run()
    # the flow changed state once: Pending -> Failed
    assert handler_results["Flow"] == 1


def test_parameters_are_placed_into_context_and_override_current_context():
    flow = Flow(name="test")
    y = prefect.Parameter("y", default=99)
    flow.add_task(y)
    with prefect.context(parameters=dict(y=88, z=55)):
        flow_state = FlowRunner(flow=flow).run(return_tasks=[y], parameters=dict(y=42))
    assert isinstance(flow_state, Success)
    assert flow_state.result[y].result == 42


def test_endrun_raised_in_initialize_is_caught_correctly():
    class BadInitializeRunner(FlowRunner):
        def initialize_run(self, *args, **kwargs):
            raise ENDRUN(state=Pending())

    res = BadInitializeRunner(Flow(name="test")).run()
    assert res.is_pending()


def test_flow_runner_has_logger():
    r = FlowRunner(Flow(name="test"))
    assert r.logger.name == "prefect.FlowRunner"
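

# Fragment of a cached-inputs test: its `def test_...` header is missing from this
# excerpt, and the flow `f`, its tasks `a`/`a_res` and `b`, and the `executor`
# fixture are defined earlier in the original test (not shown here).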
b_res = b(a_res)

first_state = FlowRunner(flow=f).run(
    executor=executor, parameters=dict(a=1), return_tasks=f.tasks
)
assert first_state.is_running()

a_state = first_state.result[a_res]
a_state.result = (
    NoResult  # remove the result to see if the cached results are picked up
)
b_state = first_state.result[b_res]
b_state.cached_inputs = dict(x=Result(2))  # artificially alter state

with raise_on_exception():  # without caching we'd expect a KeyError
    second_state = FlowRunner(flow=f).run(
        executor=executor, return_tasks=[b_res], task_states=first_state.result
    )
assert isinstance(second_state, Success)
assert second_state.result[b_res].result == 1


def test_task_handler_that_raises_signal_is_trapped(self):
    def handler(flow, old, new):
        raise signals.FAIL()

    flow = Flow(name="test", state_handlers=[handler])
    state = FlowRunner(flow=flow).run()
    assert state.is_failed()


def test_flow_runner_runs_basic_flow_with_2_dependent_tasks_and_second_task_fails():
    flow = Flow(name="test")
    task1 = SuccessTask()
    task2 = ErrorTask()
    flow.add_edge(task1, task2)

    flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
    assert isinstance(flow_state, Failed)
    assert isinstance(flow_state.result[task1], Success)
    assert isinstance(flow_state.result[task2], Failed)


def test_flow_runner_passes_along_its_init_context_to_tasks_after_serialization(
    self,
):
    @prefect.task
    def grab_key():
        return prefect.context["THE_ANSWER"]

    with prefect.context(THE_ANSWER=42):
        prerunner = FlowRunner(Flow(name="test", tasks=[grab_key]))

    runner = cloudpickle.loads(cloudpickle.dumps(prerunner))
    flow_state = runner.run(return_tasks=list(runner.flow.tasks))
    assert flow_state.is_successful()
    assert flow_state.result[runner.flow.tasks.pop()].result == 42


def test_task_contexts_are_provided_to_tasks():
    @prefect.task(name="rc", slug="rc")
    def return_context():
        return prefect.context.to_dict()

    with Flow(name="test") as flow:
        rc = return_context()

    state = FlowRunner(flow=flow).run(return_tasks=[rc])
    ctx = state.result[rc].result
    assert ctx["task_name"] == rc.name
    assert ctx["task_slug"] == rc.slug


def test_flow_runner_runs_basic_flow_with_2_independent_tasks():
    flow = Flow(name="test")
    task1 = SuccessTask()
    task2 = SuccessTask()
    flow.add_task(task1)
    flow.add_task(task2)

    flow_state = FlowRunner(flow=flow).run(return_tasks=[task1, task2])
    assert isinstance(flow_state, Success)
    assert flow_state.result[task1] == Success(result=1)
    assert flow_state.result[task2] == Success(result=1)