How to use the dbt.logger.GLOBAL_LOGGER.debug function in dbt

To help you get started, we’ve selected a few dbt examples that show popular ways dbt.logger.GLOBAL_LOGGER.debug is used in public projects.
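
Before working through the excerpts, here is a minimal sketch of the basic call pattern, assuming a 0.x-era dbt installation where the module-level logger is exposed as dbt.logger.GLOBAL_LOGGER; the excerpts below all import it under the alias logger. The log_connection_state helper is purely illustrative and not part of dbt.

from dbt.logger import GLOBAL_LOGGER as logger

def log_connection_state(name, state):
    # Illustrative helper (not part of dbt): emit a debug-level message,
    # pre-formatting the string the same way most of the excerpts below do.
    logger.debug('Connection {} is currently in state {}'.format(name, state))

log_connection_state('master', 'open')

Passing an already-formatted string keeps the call independent of whichever logging backend GLOBAL_LOGGER wraps in your dbt version; debug-level messages typically end up in dbt's log file rather than the console unless debug output is enabled.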


github fishtown-analytics/dbt: core/dbt/adapters/base/connections.py (view on GitHub)
        """Open a connection if it's not already open, and assign it name
        regardless.

        The caller is responsible for putting the assigned connection into the
        in_use collection.

        :param Connection conn: A connection, in any state.
        :param str name: The name of the connection to set.
        """
        if name is None:
            name = 'master'

        conn.name = name

        if conn.state == 'open':
            logger.debug('Re-using an available connection from the pool.')
        else:
            logger.debug('Opening a new connection, currently in state {}'
                         .format(conn.state))
            conn = self.open(conn)

        return conn
github fishtown-analytics/dbt-spark: dbt/adapters/spark/connections.py (view on GitHub)
        exc = None

        for i in range(1 + creds.connect_retries):
            try:
                if creds.method == 'http':
                    cls.validate_creds(creds, ['token', 'host', 'port',
                                               'cluster', 'organization'])

                    conn_url = cls.SPARK_CONNECTION_URL.format(
                        host=creds.host,
                        port=creds.port,
                        organization=creds.organization,
                        cluster=creds.cluster
                    )

                    logger.debug("connection url: {}".format(conn_url))

                    transport = THttpClient.THttpClient(conn_url)

                    raw_token = "token:{}".format(creds.token).encode()
                    token = base64.standard_b64encode(raw_token).decode()
                    transport.setCustomHeaders({
                        'Authorization': 'Basic {}'.format(token)
                    })

                    conn = hive.connect(thrift_transport=transport)
                elif creds.method == 'thrift':
                    cls.validate_creds(creds,
                                       ['host', 'port', 'user', 'schema'])

                    conn = hive.connect(host=creds.host,
                                        port=creds.port,
github fishtown-analytics/dbt: plugins/bigquery/dbt/adapters/bigquery/connections.py (view on GitHub)
    def exception_handler(self, sql):
        try:
            yield

        except google.cloud.exceptions.BadRequest as e:
            message = "Bad request while running:\n{sql}"
            self.handle_error(e, message, sql)

        except google.cloud.exceptions.Forbidden as e:
            message = "Access denied while running:\n{sql}"
            self.handle_error(e, message, sql)

        except Exception as e:
            logger.debug("Unhandled error while running:\n{}".format(sql))
            logger.debug(e)
            if isinstance(e, dbt.exceptions.RuntimeException):
                # during a sql query, an internal to dbt exception was raised.
                # this sounds a lot like a signal handler and probably has
                # useful information, so raise it without modification.
                raise
            raise dbt.exceptions.RuntimeException(str(e)) from e
github fishtown-analytics/dbt: core/dbt/node_runners.py (view on GitHub)
    def _handle_internal_exception(self, e, ctx):
        build_path = self.node.build_path
        prefix = 'Internal error executing {}'.format(build_path)

        error = "{prefix}\n{error}\n\n{note}".format(
                     prefix=dbt.ui.printer.red(prefix),
                     error=str(e).strip(),
                     note=INTERNAL_ERROR_STRING)
        logger.debug(error)
        return dbt.compat.to_string(e)

github fishtown-analytics/dbt: core/dbt/clients/git.py (view on GitHub)
def _checkout(cwd, repo, branch):
    logger.debug('  Checking out branch {}.'.format(branch))

    run_cmd(cwd, ['git', 'remote', 'set-branches', 'origin', branch])
    run_cmd(cwd, ['git', 'fetch', '--tags', '--depth', '1', 'origin', branch])

    tags = list_tags(cwd)

    # Prefer tags to branches if one exists
    if branch in tags:
        spec = 'tags/{}'.format(branch)
    else:
        spec = 'origin/{}'.format(branch)

    out, err = run_cmd(cwd, ['git', 'reset', '--hard', spec],
                       env={'LC_ALL': 'C'})
    return out, err

github fishtown-analytics/dbt: core/dbt/task/rpc/sql_commands.py (view on GitHub)
    def _in_thread(self, node, thread_done):
        runner = self.get_runner(node)
        try:
            self.node_results.append(runner.safe_run(self.manifest))
        except Exception as exc:
            logger.debug('Got exception {}'.format(exc), exc_info=True)
            self._raise_next_tick = exc
        finally:
            thread_done.set()

github fishtown-analytics/dbt: dbt/adapters/snowflake/impl.py (view on GitHub)
    def exception_handler(self, sql, model_name=None,
                          connection_name='master'):
        connection = self.get_connection(connection_name)

        try:
            yield
        except snowflake.connector.errors.ProgrammingError as e:
            msg = dbt.compat.to_string(e)

            logger.debug('Snowflake error: {}'.format(msg))

            if 'Empty SQL statement' in msg:
                logger.debug("got empty sql statement, moving on")
            elif 'This session does not have a current database' in msg:
                self.release_connection(connection_name)
                raise dbt.exceptions.FailedToConnectException(
                    ('{}\n\nThis error sometimes occurs when invalid '
                     'credentials are provided, or when your default role '
                     'does not have access to use the specified database. '
                     'Please double check your profile and try again.')
                    .format(msg))
            else:
                self.release_connection(connection_name)
                raise dbt.exceptions.DatabaseException(msg)
        except Exception as e:
            logger.debug("Error running SQL: %s", sql)
github fishtown-analytics/dbt: dbt/adapters/bigquery/impl.py (view on GitHub)
    def raw_execute(self, sql, model_name=None, fetch=False, **kwargs):
        conn = self.get_connection(model_name)
        client = conn.handle

        logger.debug('On %s: %s', model_name, sql)

        job_config = google.cloud.bigquery.QueryJobConfig()
        job_config.use_legacy_sql = False
        query_job = client.query(sql, job_config)

        # this blocks until the query has completed
        with self.exception_handler(sql, model_name):
            iterator = query_job.result()

        return query_job, iterator

github fishtown-analytics/dbt: dbt/parser.py (view on GitHub)
def parse_seed_file(file_match, root_dir, package_name):
    """Parse the given seed file, returning an UnparsedNode and the agate
    table.
    """
    abspath = file_match['absolute_path']
    logger.debug("Parsing {}".format(abspath))
    to_return = {}
    table_name = os.path.basename(abspath)[:-4]
    node = UnparsedNode(
        path=file_match['relative_path'],
        name=table_name,
        root_path=root_dir,
        resource_type=NodeType.Seed,
        # Give this raw_sql so it conforms to the node spec,
        # use dummy text so it doesn't look like an empty node
        raw_sql='-- csv --',
        package_name=package_name,
        original_file_path=os.path.join(file_match.get('searched_path'),
                                        file_match.get('relative_path')),
    )
    try:
        table = dbt.clients.agate_helper.from_csv(abspath)