How to use the dagster.check.invariant function in dagster

To help you get started, we’ve selected a few examples of dagster.check.invariant, based on popular ways it is used in public projects.
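check.invariant asserts that an arbitrary boolean condition holds and raises a CheckError, carrying an optional description, when it does not. The sketch below is not taken from any of the projects on this page; it assumes the check module is importable as dagster.check, which may differ between dagster versions.

# Minimal sketch of check.invariant behavior (illustrative, not from the dagster codebase).
from dagster import check
from dagster.check import CheckError

check.invariant(1 + 1 == 2)  # condition holds, so this returns without raising

try:
    check.invariant('extra' in {}, 'expected key "extra" to be present')
except CheckError as error:
    print(error)  # the raised error carries the description passed above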


github dagster-io / dagster / python_modules / dagster / dagster / core / execution / plan / materialization_thunk.py
def configs_for_output(solid, solid_config, output_def):
    for output_spec in solid_config.outputs:
        check.invariant(len(output_spec) == 1)
        output_name, output_spec = list(output_spec.items())[0]
        check.invariant(solid.has_output(output_name))
        if output_name == output_def.name:
            yield output_spec
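
The invariants above encode a structural assumption about solid config: each entry in solid_config.outputs is a dict with exactly one key, mapping an output name to its config, and that name must match an output the solid actually defines. Below is a standalone sketch of the same single-key guard, using a made-up dict in place of real solid config.

# Hypothetical output spec shaped like one entry of solid_config.outputs.
from dagster import check

output_spec = {'result': {'json': {'path': 'out.json'}}}
check.invariant(len(output_spec) == 1, 'Each output spec must contain exactly one output name')
output_name, output_config = list(output_spec.items())[0]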

github dagster-io / dagster / python_modules / libraries / dagster-ge / dagster_ge / __init__.py
def _file_passes(_info, df):
        with open(file_path) as ff:
            expt_config = json.load(ff)
            # This is necessary because ge ends up coercing a type change
            # on the same dataframe instance, changing the type. But a
            # ge can't be copied, because it causes an error.
            # The error is
            #  AttributeError: 'PandasDataset' object has no attribute
            #  'discard_subset_failing_expectations'
            # See https://github.com/great-expectations/great_expectations/issues/342
            df_copy = copy.deepcopy(df)
            ge_df = ge.from_pandas(df_copy, expectations_config=expt_config)
            validate_result = ge_df.validate()
            check.invariant('success' in validate_result)
            check.invariant('results' in validate_result)
            return ExpectationResult(
                success=validate_result['success'],
                metadata_entries=[EventMetadataEntry.json(label='result', data=validate_result)],
            )

github dagster-io / dagster / python_modules / dagster / dagster / core / log.py
def _log(self, method, orig_message, message_props):
        check.str_param(method, 'method')
        check.str_param(orig_message, 'orig_message')
        check.dict_param(message_props, 'message_props')

        check.invariant(
            'extra' not in message_props, 'do not allow until explicit support is handled'
        )
        check.invariant(
            'exc_info' not in message_props, 'do not allow until explicit support is handled'
        )

        check.invariant('orig_message' not in message_props, 'orig_message reserved value')
        check.invariant('message' not in message_props, 'message reserved value')
        check.invariant('log_message_id' not in message_props, 'log_message_id reserved value')
        check.invariant('log_timestamp' not in message_props, 'log_timestamp reserved value')

        log_message_id = str(uuid.uuid4())

        log_timestamp = datetime.datetime.utcnow().isoformat()

        synth_props = {
            'orig_message': orig_message,
            'log_message_id': log_message_id,
            'log_timestamp': log_timestamp,
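
The block of invariants above rejects user-supplied message_props whose keys collide with values the logger generates itself. A condensed sketch of that reserved-key guard (the helper below is illustrative, not part of dagster):

from dagster import check

RESERVED_KEYS = ['orig_message', 'message', 'log_message_id', 'log_timestamp']

def check_no_reserved_keys(message_props):
    check.dict_param(message_props, 'message_props')
    for key in RESERVED_KEYS:
        check.invariant(key not in message_props, '{key} is a reserved value'.format(key=key))

check_no_reserved_keys({'user_key': 'ok'})  # passes; a reserved key would raise CheckError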

github dagster-io / dagster / python_modules / dagster / dagster / core / types / runtime / runtime_type.py
def resolve_to_runtime_type(dagster_type):
    # circular dep
    from .python_dict import PythonDict
    from .python_set import PythonSet
    from .python_tuple import PythonTuple
    from dagster.core.types.config.config_type import ConfigType
    from dagster.core.types.wrapping.mapping import (
        remap_python_builtin_for_runtime,
        is_supported_runtime_python_builtin,
    )
    from dagster.core.types.wrapping.wrapping import transform_typing_type
    from dagster.utils.typing_api import is_typing_type

    check.invariant(
        not (isinstance(dagster_type, type) and issubclass(dagster_type, ConfigType)),
        'Cannot resolve a config type to a runtime type',
    )

    check.invariant(
        not (isinstance(dagster_type, type) and issubclass(dagster_type, RuntimeType)),
        'Do not pass runtime type classes. Got {}'.format(dagster_type),
    )

    # First, check to see if it is part of Python's typing library.
    # Transform to our wrapping type system.
    if is_typing_type(dagster_type):
        dagster_type = transform_typing_type(dagster_type)

    if isinstance(dagster_type, RuntimeType):
        return dagster_type

github dagster-io / dagster / python_modules / dagster / dagster / core / execution / api.py
def execute_run_iterator(pipeline, pipeline_run, instance):
    check.inst_param(pipeline, 'pipeline', PipelineDefinition)
    instance = check.inst_param(instance, 'instance', DagsterInstance)
    check.invariant(pipeline_run.status == PipelineRunStatus.NOT_STARTED)

    execution_plan = create_execution_plan(
        pipeline, environment_dict=pipeline_run.environment_dict, run_config=pipeline_run
    )

    with scoped_pipeline_context(
        pipeline, pipeline_run.environment_dict, pipeline_run, instance
    ) as pipeline_context:
        for event in _pipeline_execution_iterator(pipeline_context, execution_plan, pipeline_run):
            yield event
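
Here invariant acts as a precondition: execute_run_iterator refuses to run a pipeline whose status is anything other than NOT_STARTED. A minimal sketch of the same idea, using a hypothetical stand-in object rather than a real PipelineRun:

from dagster import check

class StubRun(object):  # hypothetical stand-in for a PipelineRun
    status = 'NOT_STARTED'

def start(run):
    check.invariant(run.status == 'NOT_STARTED', 'This run has already started')
    # ... execution would proceed here ...

start(StubRun())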

github dagster-io / dagster / python_modules / dagster / dagster / sqlalchemy / common.py
def check_supports_sql_alchemy_resource(context):
    check.inst_param(context, 'context', ExecutionContext)
    check.invariant(context.resources is not None)
    check.invariant(
        hasattr(context.resources, 'sa'),
        'Resources must have sa property be an object of SqlAlchemyResource',
    )
    check.inst(
        context.resources.sa,
        SqlAlchemyResource,
        'Resources must have sa property be an object of SqlAlchemyResource',
    )
    return context
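
This helper mixes two different checks: check.invariant takes an arbitrary boolean condition, while check.inst asserts that a value is an instance of a given type and returns the value on success. A hedged sketch of that distinction, substituting a plain class for SqlAlchemyResource:

from dagster import check

class FakeResource(object):  # stand-in for SqlAlchemyResource in this sketch
    pass

class Resources(object):
    sa = FakeResource()

resources = Resources()

check.invariant(hasattr(resources, 'sa'), 'Resources must have an sa property')
sa = check.inst(resources.sa, FakeResource, 'sa must be a FakeResource instance')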

github dagster-io / dagster / python_modules / dagster / dagster / core / log_manager.py
def _prepare_message(self, orig_message, message_props):
        check.str_param(orig_message, 'orig_message')
        check.dict_param(message_props, 'message_props')

        # These are todos to further align with the Python logging API
        check.invariant(
            'extra' not in message_props, 'do not allow until explicit support is handled'
        )
        check.invariant(
            'exc_info' not in message_props, 'do not allow until explicit support is handled'
        )

        # Reserved keys in the message_props -- these are system generated.
        check.invariant('orig_message' not in message_props, 'orig_message reserved value')
        check.invariant('message' not in message_props, 'message reserved value')
        check.invariant('log_message_id' not in message_props, 'log_message_id reserved value')
        check.invariant('log_timestamp' not in message_props, 'log_timestamp reserved value')

        log_message_id = str(uuid.uuid4())

        log_timestamp = datetime.datetime.utcnow().isoformat()

        synth_props = {
            'orig_message': orig_message,
            'log_message_id': log_message_id,
            'log_timestamp': log_timestamp,
            'run_id': self.run_id,
        }

        # We first generate all props for the purpose of producing the semi-structured

github dagster-io / dagster / python_modules / dagster-graphql / dagster_graphql / implementation / execution.py
' True'.format(schedule_name=schedule_name)
        )

    # Get environment_dict
    if schedule_def.environment_dict:
        environment_dict = schedule_def.environment_dict
    else:
        environment_dict = schedule_def.environment_dict_fn()

    # Get tags
    if schedule_def.tags:
        tags = schedule_def.tags
    else:
        tags = schedule_def.tags_fn()

    check.invariant('dagster/schedule_id' not in tags)
    tags['dagster/schedule_id'] = schedule.schedule_id

    check.invariant('dagster/schedule_name' not in tags)
    tags['dagster/schedule_name'] = schedule_def.name

    execution_metadata_tags = [{'key': key, 'value': value} for key, value in tags.items()]
    execution_params = merge_dicts(
        schedule_def.execution_params, {'executionMetadata': {'tags': execution_metadata_tags}}
    )

    selector = ExecutionSelector(
        execution_params['selector']['name'], execution_params['selector'].get('solidSubset')
    )

    execution_params = ExecutionParams(
        selector=selector,