How to use the datalab.bigquery._api.Api function in datalab

To help you get started, we’ve selected a few datalab examples that show popular ways this function is used in public projects.
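Before the examples, here is a minimal sketch of how the low-level Api wrapper is typically constructed from a Context. It assumes default credentials and a project id are already configured in the environment; the property access on the last line reflects the legacy pydatalab API.

import datalab.context
from datalab.bigquery import _api

context = datalab.context.Context.default()  # picks up the default project id and credentials
api = _api.Api(context)                      # thin wrapper around the BigQuery REST endpoints
print(api.project_id)                        # project that subsequent requests will be issued against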


github googledatalab / pydatalab / legacy_tests / bigquery / table_tests.py
def test_api_paths(self):
    name = datalab.bigquery._utils.TableName('a', 'b', 'c', 'd')
    self.assertEqual('/projects/a/datasets/b/tables/cd',
                     datalab.bigquery._api.Api._TABLES_PATH % name)
    self.assertEqual('/projects/a/datasets/b/tables/cd/data',
                     datalab.bigquery._api.Api._TABLEDATA_PATH % name)
    name = datalab.bigquery._utils.DatasetName('a', 'b')
    self.assertEqual('/projects/a/datasets/b', datalab.bigquery._api.Api._DATASETS_PATH % name)
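Judging by the assertions above, the path attributes under test are plain %-format templates along these lines (a sketch inferred from the expected strings, not copied from the source):

# TableName appears to be a namedtuple of (project_id, dataset_id, table_id, decorator),
# so 'c' and 'd' concatenate into the 'cd' segment seen in the expected paths.
_TABLES_PATH = '/projects/%s/datasets/%s/tables/%s%s'
_TABLEDATA_PATH = '/projects/%s/datasets/%s/tables/%s%s/data'
_DATASETS_PATH = '/projects/%s/datasets/%s'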
github googledatalab / pydatalab / datalab / bigquery / _job.py
def _create_api(self, context):
    return _api.Api(context)
github googledatalab / pydatalab / datalab / bigquery / _table.py
def __init__(self, name, context=None):
    """Initializes an instance of a Table object. The Table need not exist yet.

    Args:
      name: the name of the table either as a string or a 3-part tuple (projectid, datasetid, name).
        If a string, it must have the form '<project>:<dataset>.<table>' or '<dataset>.<table>'.
      context: an optional Context object providing project_id and credentials. If a specific
        project id or credentials are unspecified, the default ones configured at the global
        level are used.
    Raises:
      Exception if the name is invalid.
    """
    if context is None:
      context = datalab.context.Context.default()
    self._context = context
    self._api = _api.Api(context)
    self._name_parts = _utils.parse_table_name(name, self._api.project_id)
    self._full_name = '%s:%s.%s%s' % self._name_parts
    self._info = None
    self._cached_page = None
    self._cached_page_index = 0
    self._schema = None
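A short usage sketch for the constructor above, assuming a default Context and hypothetical table names; the public datalab.bigquery.Table class is what exposes this __init__.

import datalab.bigquery as bq

t1 = bq.Table('my-project:my_dataset.my_table')           # '<project>:<dataset>.<table>' form (hypothetical names)
t2 = bq.Table('my_dataset.my_table')                      # '<dataset>.<table>' form; project comes from the Context
t3 = bq.Table(('my-project', 'my_dataset', 'my_table'))   # 3-part tuple form from the docstring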
github googledatalab / pydatalab / datalab / bigquery / _query.py
          project id or credentials are unspecified, the default ones configured at the global
          level are used.
      values: a dictionary used to expand variables if passed a SqlStatement or a string with
          variable references.
      udfs: array of UDFs referenced in the SQL.
      data_sources: dictionary of federated (external) tables referenced in the SQL.
      kwargs: arguments to use when expanding the variables if passed a SqlStatement
          or a string with variable references.

    Raises:
      Exception if expansion of any variables failed.
      """
    if context is None:
      context = datalab.context.Context.default()
    self._context = context
    self._api = _api.Api(context)
    self._data_sources = data_sources
    self._udfs = udfs

    if data_sources is None:
      data_sources = {}

    self._results = None
    self._code = None
    self._imports = []
    if values is None:
      values = kwargs

    self._sql = datalab.data.SqlModule.expand(sql, values)

    # We need to take care not to include the same UDF code twice so we use sets.
    udfs = set(udfs if udfs else [])
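A hedged usage sketch for the constructor above: the SQL string and variable name are hypothetical, and the $name references are expanded from the values dictionary via SqlModule.expand, as the snippet shows. The sql property and results() call are assumed from the legacy Query API.

import datalab.bigquery as bq

# Variables referenced as $name in the SQL are expanded from `values` (or **kwargs).
q = bq.Query('SELECT $answer AS answer', values={'answer': 42})
print(q.sql)             # the expanded SQL text
results = q.results()    # assumed: runs the query and returns its result set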
github googledatalab / pydatalab / datalab / bigquery / _api.py
def datasets_update(self, dataset_name, dataset_info):
    """Updates the Dataset info.

    Args:
      dataset_name: the name of the dataset to update as a tuple of components.
      dataset_info: the Dataset resource with updated fields.
    """
    url = Api._ENDPOINT + (Api._DATASETS_PATH % dataset_name)
    return datalab.utils.Http.request(url, method='PUT', data=dataset_info,
                                      credentials=self._credentials)
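A usage sketch for datasets_update with a hypothetical project and dataset. It follows the read-modify-write pattern of the BigQuery REST API and assumes a companion datasets_get accessor exists alongside datasets_update in the same module.

import datalab.context
from datalab.bigquery import _api

api = _api.Api(datalab.context.Context.default())
dataset_name = ('my-project', 'my_dataset')   # hypothetical (project_id, dataset_id) tuple
info = api.datasets_get(dataset_name)         # assumed accessor: fetch the current Dataset resource
info['description'] = 'Nightly reporting tables'
api.datasets_update(dataset_name, info)       # PUTs the updated resource back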