# Assumed imports for this snippet: core dagster APIs, the pandas-backed
# DataFrame type from dagster_pandas, and the script_relative_path test helper.
from dagster import InputDefinition, execute_pipeline, pipeline, solid
from dagster.utils import script_relative_path
from dagster_pandas import DataFrame


def test_dataframe_table_from_inputs():
    called = {}

    @solid(input_defs=[InputDefinition('df', DataFrame)])
    def df_as_config(_context, df):
        assert df.to_dict('list') == {'num1': [1, 3], 'num2': [2, 4]}
        called['yup'] = True

    @pipeline
    def test_pipeline():
        df_as_config()

    result = execute_pipeline(
        test_pipeline,
        {
            'solids': {
                'df_as_config': {
                    'inputs': {'df': {'table': {'path': script_relative_path('num_table.txt')}}}
                }
            }
        },
    )

    assert result.success
    assert called['yup']
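# The fixture file isn't shown on this page. A minimal sketch of what
# num_table.txt presumably contains, inferred from the to_dict assertion
# above (the 'table' input is presumed to load a delimited file with a
# header row):
#
#     num1 num2
#     1    2
#     3    4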
# Reconstructed head (cut off on the source page): the helper, test name, and
# the opening of step_one_solid are assumptions inferred from step_two_solid.
def _set_key_value(ddict, key, value):
    ddict[key] = value
    return value


def test_pipeline_execution_sequence():  # hypothetical name; original not shown
    did_run_dict = {}
    step_one_solid = single_output_solid(
        name='step_one_solid',
        input_defs=[],
        compute_fn=lambda context, args: _set_key_value(did_run_dict, 'step_one', True),
        output_def=OutputDefinition(),
    )
    step_two_solid = single_output_solid(
        name='step_two_solid',
        input_defs=[InputDefinition('step_one_solid')],
        compute_fn=lambda context, args: _set_key_value(did_run_dict, 'step_two', True),
        output_def=OutputDefinition(),
    )

    @pipeline
    def pipe():
        step_two_solid(step_one_solid())

    pipeline_result = execute_pipeline(pipe)
    assert pipeline_result.success
    for result in pipeline_result.solid_result_list:
        assert result.success
    assert did_run_dict['step_one'] is True
    assert did_run_dict['step_two'] is True
# The top of this snippet is cut off on the source page; the test name and the
# opening of the first YAML variant are reconstructed from the second variant.
def test_part_nine_step_one():  # hypothetical name; original not shown
    yaml_variants = [
        '''
solids:
  ingest_a:
    config: 2
  ingest_b:
    config: 3
context:
''',
        '''
solids:
  ingest_a:
    config: 2
  ingest_b:
    config: 3
''',
    ]

    for yaml_variant in yaml_variants:
        # safe_load: a bare yaml.load requires an explicit Loader in modern PyYAML
        result = execute_pipeline(
            define_part_nine_step_one_pipeline(), yaml.safe_load(yaml_variant)
        )

        assert result.success
        assert result.result_for_solid('ingest_a').transformed_value() == 2
        assert result.result_for_solid('ingest_b').transformed_value() == 3
        assert result.result_for_solid('add_ints').transformed_value() == 5
        assert result.result_for_solid('mult_ints').transformed_value() == 6
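# The second YAML variant above parses to the environment dict below; a
# minimal sketch of passing the equivalent config in code rather than YAML:
environment_dict = {
    'solids': {
        'ingest_a': {'config': 2},
        'ingest_b': {'config': 3},
    }
}
result = execute_pipeline(define_part_nine_step_one_pipeline(), environment_dict)
assert result.success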
def test_two_cliques():
    @lambda_solid
    def return_one():  # reconstructed: the snippet asserts on return_one but its definition was cut
        return 1

    @lambda_solid
    def return_two():
        return 2

    @pipeline
    def diamond_pipeline():
        return (return_one(), return_two())

    result = execute_pipeline(diamond_pipeline)

    assert result.result_for_solid('return_one').output_value() == 1
    assert result.result_for_solid('return_two').output_value() == 2
def test_multilevel_good_error_handling_config_solids_name_solids():
    @solid(config_field=Field(Optional[Int]))
    def good_error_handling(_context):
        pass

    @pipeline
    def pipeline_def():
        good_error_handling()

    execute_pipeline(
        pipeline_def, environment_dict={'solids': {'good_error_handling': {'config': None}}}
    )
def test_cloudwatch_logging(cloudwatch_client):
    res = execute_pipeline(
        define_hello_cloudwatch_pipeline(),
        {
            'loggers': {
                'cloudwatch': {
                    'config': {
                        'log_group_name': TEST_CLOUDWATCH_LOG_GROUP_NAME,
                        'log_stream_name': TEST_CLOUDWATCH_LOG_STREAM_NAME,
                        'aws_region': AWS_REGION,
                    }
                }
            }
        },
    )

    now = millisecond_timestamp(datetime.datetime.utcnow())
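# millisecond_timestamp is not defined on this page; a plausible sketch of
# such a helper (an assumption, not necessarily the project's implementation):
import datetime


def millisecond_timestamp(dt):
    # Milliseconds since the Unix epoch for a naive UTC datetime.
    epoch = datetime.datetime(1970, 1, 1)
    return int((dt - epoch).total_seconds() * 1000)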
def test_multiple_outputs_mismatch():  # hypothetical name; the original wrapper is cut off
    def _t_fn(*_args):
        # Deliberately yields an output name the solid does not declare.
        yield Result(output_name='mismatch', value='foo')

    solid = SolidDefinition(
        name='multiple_outputs',
        inputs=[],
        outputs=[
            OutputDefinition(name='output_one'),
        ],
        transform_fn=_t_fn,
    )

    pipeline = PipelineDefinition(solids=[solid])

    with pytest.raises(DagsterInvariantViolationError):
        execute_pipeline(pipeline)
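# For contrast, a minimal sketch (same legacy SolidDefinition API as above) of
# a transform whose yielded Result matches the declared output name, so
# execution would succeed instead of raising:
def _ok_fn(*_args):
    yield Result(output_name='output_one', value='foo')  # matches the OutputDefinition above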
def test_no_env_missing_required_error_handling():
    @solid(config_field=Field(Int))
    def required_int_solid(_context):
        pass

    pipeline_def = PipelineDefinition(
        name='no_env_missing_required_error', solid_defs=[required_int_solid]
    )

    with pytest.raises(DagsterInvalidConfigError) as pe_info:
        execute_pipeline(pipeline_def)

    assert isinstance(pe_info.value, DagsterInvalidConfigError)
    pe = pe_info.value
    assert len(pe.errors) == 1
    mfe = pe.errors[0]
    assert mfe.reason == DagsterEvaluationErrorReason.MISSING_REQUIRED_FIELD
    assert len(pe.errors) == 1
    assert pe.errors[0].message == (
        '''Missing required field "solids" at document config root. '''
        '''Available Fields: "['execution', 'loggers', '''  # expected message truncated in the source snippet
    )
def test_intro_tutorial_part_twelve_step_two_pipeline():
    with pytest.raises(DagsterTypeError):
        execute_pipeline(define_part_twelve_step_two_pipeline())
def test_single_solid_pipeline_success():
    events = defaultdict(list)

    @lambda_solid
    def solid_one():
        return 1

    def _event_callback(record):
        if record.is_dagster_event:
            events[record.dagster_event.event_type].append(record)

    pipeline_def = PipelineDefinition(
        name='single_solid_pipeline', solid_defs=[solid_one], mode_defs=[mode_def(_event_callback)]
    )

    result = execute_pipeline(pipeline_def, {'loggers': {'callback': {}}})

    assert result.success
    assert events

    start_event = single_dagster_event(events, DagsterEventType.STEP_START)
    assert start_event.pipeline_name == 'single_solid_pipeline'
    assert start_event.dagster_event.solid_name == 'solid_one'
    assert start_event.dagster_event.solid_definition_name == 'solid_one'

    output_event = single_dagster_event(events, DagsterEventType.STEP_OUTPUT)
    assert output_event
    assert output_event.dagster_event.step_output_data.output_name == 'result'
    assert output_event.dagster_event.step_output_data.intermediate_materialization is None

    success_event = single_dagster_event(events, DagsterEventType.STEP_SUCCESS)
    assert success_event.pipeline_name == 'single_solid_pipeline'
    assert success_event.dagster_event.solid_name == 'solid_one'
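# mode_def and single_dagster_event are helpers defined elsewhere in the test
# module. A plausible sketch of the latter, assuming events maps each
# DagsterEventType to the records collected by _event_callback above:
def single_dagster_event(events, event_type):
    # Assumed helper: each asserted event type occurs exactly once here.
    assert event_type in events
    return events[event_type][0]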