How to use the datalab.context.Context.default function in datalab

To help you get started, we've selected a few datalab examples based on popular ways it is used in public projects.
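At its core, datalab.context.Context.default() returns the shared Context object that carries the active project id and credentials. A minimal sketch of direct use, printing the two attributes the examples below rely on:

import datalab.context

# Fetch the default context configured for this environment.
context = datalab.context.Context.default()

print(context.project_id)   # the globally configured project id
print(context.credentials)  # credentials passed to Google API clients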


From googledatalab/pydatalab, datalab/ml/_job.py:
def __init__(self, filter=None):
    """Initializes an instance of a CloudML Job list that is iterable ("for job in jobs()").

    Args:
      filter: filter string for retrieving jobs, such as "state=FAILED".
    """
    self._filter = filter
    # The context and API client are not parameters here; they come from the
    # global default context and the module-level API discovery client.
    self._context = datalab.context.Context.default()
    self._api = discovery.build('ml', 'v1', credentials=self._context.credentials)
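Given that constructor, a caller only needs to supply a filter string; everything else comes from the default context. A hypothetical usage sketch (the class name datalab.ml.Jobs is assumed from the file path, and the filter syntax follows the docstring above):

import datalab.ml

# Iterate over failed CloudML jobs in the default project.
for job in datalab.ml.Jobs(filter='state=FAILED'):
    print(job)
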
From googledatalab/pydatalab, google/datalab/kernel/__init__.py:
def _set_project_id(project_id):
    context = google.datalab.Context.default()
    context.set_project_id(project_id)
    try:
      from datalab.context import Context as _old_context
      _old_context.default().set_project_id(project_id)
    except ImportError:
      # If the old library is not loaded, then we don't have to do anything
      pass
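The same setter can be exercised directly from user code; a short sketch against the newer google.datalab API (the project id is a placeholder):

import google.datalab

context = google.datalab.Context.default()
context.set_project_id('my-gcp-project')  # placeholder project id
print(context.project_id)                 # -> 'my-gcp-project'
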
From googledatalab/pydatalab, datalab/mlalpha/commands/_mlalpha.py:
  # TODO: Remove 'extra_packages' once it is not needed by dataflow.
  if is_cloud:
    content_pipeline = \
"""import datetime
options = {
    'staging_location': os.path.join(OUTPUT_DIR, 'tmp', 'staging'),
    'temp_location': os.path.join(OUTPUT_DIR, 'tmp'),
    'job_name': '%s' + '-' + datetime.datetime.now().strftime('%%y%%m%%d-%%H%%M%%S'),
    'project': '%s',
    'extra_packages': [ml.sdk_location],
    'teardown_policy': 'TEARDOWN_ALWAYS',
    'no_save_main_session': True
}
opts = beam.pipeline.PipelineOptions(flags=[], **options)
pipeline = beam.Pipeline('DataflowPipelineRunner', options=opts)
""" % (job_name_prefix, datalab.context.Context.default().project_id)
  else:
    content_pipeline = """pipeline = beam.Pipeline('DirectPipelineRunner')\n"""

  return content_pipeline
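The interesting part of the template above is how the default context's project_id flows into the Dataflow options. A condensed sketch of the same idea, assuming Apache Beam is installed (the job name prefix is a placeholder):

import datetime
import apache_beam as beam
import datalab.context

project_id = datalab.context.Context.default().project_id
options = {
    'project': project_id,
    'job_name': 'my-job-' + datetime.datetime.now().strftime('%y%m%d-%H%M%S'),
}
opts = beam.pipeline.PipelineOptions(flags=[], **options)
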
From googledatalab/pydatalab, datalab/bigquery/_dataset.py:
def __init__(self, name, context=None):
    """Initializes an instance of a Dataset.

    Args:
      name: the name of the dataset, as a string or (project_id, dataset_id) tuple.
      context: an optional Context object providing project_id and credentials. If a specific
          project id or credentials are unspecified, the default ones configured at the global
          level are used.
    Raises:
      Exception if the name is invalid.
      """
    if context is None:
      context = datalab.context.Context.default()
    self._context = context
    self._api = _api.Api(context)
    self._name_parts = _utils.parse_dataset_name(name, self._api.project_id)
    self._full_name = '%s:%s' % self._name_parts
    self._info = None
    try:
      self._info = self._get_info()
    except datalab.utils.RequestException:
      pass
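Because of the context=None fallback, the common call site omits the context entirely. A sketch (the dataset name is a placeholder; the exists() call is taken from pydatalab's Dataset API):

import datalab.bigquery

# Resolved against Context.default()'s project_id and credentials.
ds = datalab.bigquery.Dataset('my_dataset')
print(ds.exists())
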
From googledatalab/pydatalab, datalab/kernel/__init__.py:
def _get_project_id():
    try:
      return _context.Context.default().project_id
    except Exception:
      return None
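The try/except guard matters because Context.default() raises when no credentials or project are configured. User code can reuse the same defensive lookup:

from datalab import context as _context

try:
    project_id = _context.Context.default().project_id
except Exception:
    # No default credentials/project configured in this environment.
    project_id = None
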
From googledatalab/pydatalab, datalab/bigquery/_view.py:
def __init__(self, name, context=None):
    """Initializes an instance of a View object.

    Args:
      name: the name of the view either as a string or a 3-part tuple
          (projectid, datasetid, name). If a string, it must have the form
          '&lt;project&gt;:&lt;dataset&gt;.&lt;view&gt;'.