Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
mock_client_factory.return_value = mock_client
m = MagicMock()
type(m).id = PropertyMock(
return_value=identifier.Identifier(
identifier.ResourceType.LAUNCH_PLAN,
"project",
"domain",
"name",
"version"
)
)
labels = _common_models.Labels({"my": "label"})
engine.FlyteLaunchPlan(m).execute(
'xp', 'xd', 'xn', literals.LiteralMap({}), notification_overrides=[], label_overrides=labels
)
mock_client.create_execution.assert_called_once_with(
'xp',
'xd',
'xn',
_execution_models.ExecutionSpec(
identifier.Identifier(
identifier.ResourceType.LAUNCH_PLAN,
"project",
"domain",
"name",
"version"
),
_execution_models.ExecutionMetadata(
_execution_models.ExecutionMetadata.ExecutionMode.MANUAL,
from flytekit.common.exceptions import scopes
from flytekit.configuration import TemporaryConfiguration
from flytekit.engines.flyte import engine
from flytekit.models import literals, execution as _execution_models, common as _common_models, launch_plan as \
_launch_plan_models, task as _task_models
from flytekit.models.admin import common as _common
from flytekit.models.core import errors, identifier
from flytekit.sdk import test_utils
def _single_int_literal_map(key, value):
    """Build a one-entry LiteralMap mapping *key* to an integer primitive literal."""
    return literals.LiteralMap(
        {key: literals.Literal(scalar=literals.Scalar(primitive=literals.Primitive(integer=value)))}
    )


# Canned input/output maps shared by the tests below.
_INPUT_MAP = _single_int_literal_map('a', 1)
_OUTPUT_MAP = _single_int_literal_map('b', 2)
@pytest.fixture(scope="function", autouse=True)
def temp_config():
with TemporaryConfiguration(
os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../common/configs/local.config'),
internal_overrides={
'image': 'myflyteimage:{}'.format(
os.environ.get('IMAGE_VERSION', 'sha')
),
'project': 'myflyteproject',
'domain': 'development'
mock_client_factory.return_value = mock_client
m = MagicMock()
type(m).id = PropertyMock(
return_value=identifier.Identifier(
identifier.ResourceType.LAUNCH_PLAN,
"project",
"domain",
"name",
"version"
)
)
annotations = _common_models.Annotations({"my": "annotation"})
engine.FlyteLaunchPlan(m).launch(
'xp', 'xd', 'xn', literals.LiteralMap({}), notification_overrides=[], annotation_overrides=annotations
)
mock_client.create_execution.assert_called_once_with(
'xp',
'xd',
'xn',
_execution_models.ExecutionSpec(
identifier.Identifier(
identifier.ResourceType.LAUNCH_PLAN,
"project",
"domain",
"name",
"version"
),
_execution_models.ExecutionMetadata(
_execution_models.ExecutionMetadata.ExecutionMode.MANUAL,
:param flytekit.engines.common.EngineContext context:
:param flytekit.models.literals.LiteralMap inputs:
:rtype: dict[Text, flytekit.models.common.FlyteIdlEntity]
:returns: This function must return a dictionary mapping 'filenames' to Flyte Interface Entities. These
entities will be used by the engine to pass data from node to node, populate metadata, etc. etc.. Each
engine will have different behavior. For instance, the Flyte engine will upload the entities to a remote
working directory (with the names provided), which will in turn allow Flyte Propeller to push along the
workflow. Where as local engine will merely feed the outputs directly into the next node.
"""
spec, generated_files = self._produce_dynamic_job_spec(context, inputs)
# If no sub-tasks are requested to run, just produce an outputs file like any other single-step tasks.
if len(spec.nodes) == 0:
return {
_constants.OUTPUT_FILE_NAME: _literal_models.LiteralMap(
literals={binding.var: binding.binding.to_literal_model() for binding in spec.outputs})
}
else:
generated_files.update({
_constants.FUTURES_FILE_NAME: spec
})
return generated_files
def _append_node(generated_files, node, nodes, sub_task_node):
    """Register *node* in the node list and stage its input literals file.

    :param dict generated_files: mutated in place — gains an entry mapping the
        node's input file path to a LiteralMap of its bound inputs.
    :param node: the compiled node to append.
    :param list nodes: accumulator of nodes built so far; *node* is appended.
    :param sub_task_node: SDK node whose outputs/inputs are wired up here.
    """
    nodes.append(node)
    # Make sure every upstream output reference points at a node with an id,
    # assigning this node's id where one is still missing.
    for output_name, node_output in _six.iteritems(sub_task_node.outputs):
        if not node_output.sdk_node.id:
            node_output.sdk_node.assign_id_and_return(node.id)
    # Stage the node's inputs under <node.id>/inputs.pb in the working directory.
    generated_files[_os.path.join(node.id, _constants.INPUT_FILE_NAME)] = _literal_models.LiteralMap(
        literals={
            input_binding.var: input_binding.binding.to_literal_model()
            for input_binding in sub_task_node.inputs
        }
    )
working directory (with the names provided), which will in turn allow Flyte Propeller to push along the
workflow. Where as local engine will merely feed the outputs directly into the next node.
"""
inputs_dict = _type_helpers.unpack_literal_map_to_sdk_python_std(inputs, {
k: _type_helpers.get_sdk_type_from_literal_type(v.type) for k, v in _six.iteritems(self.interface.inputs)
})
outputs_dict = {
name: _task_output.OutputReference(_type_helpers.get_sdk_type_from_literal_type(variable.type))
for name, variable in _six.iteritems(self.interface.outputs)
}
inputs_dict.update(outputs_dict)
self._execute_user_code(context, inputs_dict)
return {
_constants.OUTPUT_FILE_NAME: _literal_models.LiteralMap(
literals={k: v.sdk_value for k, v in _six.iteritems(outputs_dict)}
)
def from_flyte_idl(cls, pb2_object):
    """Deserialize a LaunchPlanSpec from its protobuf form.

    :param flyteidl.admin.launch_plan_pb2.LaunchPlanSpec pb2_object:
    :rtype: LaunchPlanSpec
    """
    # Convert each nested message through its own model's deserializer,
    # then assemble the spec from the converted pieces.
    workflow_id = _identifier.Identifier.from_flyte_idl(pb2_object.workflow_id)
    entity_metadata = LaunchPlanMetadata.from_flyte_idl(pb2_object.entity_metadata)
    default_inputs = _interface.ParameterMap.from_flyte_idl(pb2_object.default_inputs)
    fixed_inputs = _literals.LiteralMap.from_flyte_idl(pb2_object.fixed_inputs)
    labels = _common.Labels.from_flyte_idl(pb2_object.labels)
    annotations = _common.Annotations.from_flyte_idl(pb2_object.annotations)
    auth = Auth.from_flyte_idl(pb2_object.auth)
    return cls(
        workflow_id=workflow_id,
        entity_metadata=entity_metadata,
        default_inputs=default_inputs,
        fixed_inputs=fixed_inputs,
        labels=labels,
        annotations=annotations,
        auth=auth,
    )
data = _json.load(json_file)
for p in data['cells']:
meta = p['metadata']
if "outputs" in meta["tags"]:
outputs = ' '.join(p['outputs'][0]['data']['text/plain'])
if outputs is not None:
dict = _literal_models._literals_pb2.LiteralMap()
_text_format.Parse(outputs, dict)
# Add output_notebook as an output to the task.
output_notebook = _task_output.OutputReference(
_type_helpers.get_sdk_type_from_literal_type(_Types.Blob.to_flyte_literal_type()))
output_notebook.set(output_notebook_path)
output_literal_map = _literal_models.LiteralMap.from_flyte_idl(dict)
output_literal_map.literals[OUTPUT_NOTEBOOK] = output_notebook.sdk_value
return {
_constants.OUTPUT_FILE_NAME: output_literal_map
}
inputs_dict.update(outputs_dict)
with GlobalSparkContext():
_exception_scopes.user_entry_point(self.task_function)(
_sdk_runnable.ExecutionParameters(
execution_date=context.execution_date,
execution_id=context.execution_id,
stats=context.stats,
logging=context.logging,
tmp_dir=context.working_directory
),
GlobalSparkContext.get_spark_context(),
**inputs_dict
)
return {
_constants.OUTPUT_FILE_NAME: _literal_models.LiteralMap(
literals={k: v.sdk_value for k, v in _six.iteritems(outputs_dict)}
)