How to use the datalab.storage.Item class in datalab

To help you get started, we've selected a few datalab examples based on popular ways datalab.storage.Item is used in public projects.
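
For orientation, here is a minimal sketch of the Item surface these examples rely on: construct it with a bucket name and an object key, then use exists(), read_from(), write_to() and delete(). The bucket and object names below are placeholders.

import datalab.storage

# Hypothetical bucket and object names, for illustration only.
item = datalab.storage.Item('sample-bucket', 'reports/summary.txt')

if not item.exists():
  # write_to takes the content and a MIME content type, as in the examples below.
  item.write_to('hello, world', 'text/plain')

contents = item.read_from()  # read the object's contents back
item.delete()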


googledatalab / pydatalab / datalab / storage / commands / _storage.py (view on GitHub)
def _get_item_contents(source_name):
  source_bucket, source_key = datalab.storage._bucket.parse_name(source_name)
  if source_bucket is None:
    raise Exception('Invalid source object name %s; no bucket specified.' % source_name)
  if source_key is None:
    raise Exception('Invalid source object name %s; source cannot be a bucket.' % source_name)
  source = datalab.storage.Item(source_bucket, source_key)
  if not source.exists():
    raise Exception('Source object %s does not exist' % source_name)
  return source.read_from()
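
The helper might be invoked with a gs:// style object name, assuming the surrounding module is on hand; the bucket and path below are made up for illustration.

# Hypothetical object name; the bucket and path are placeholders.
text = _get_item_contents('gs://sample-bucket/reports/summary.csv')

Note that it raises on a missing bucket, a bucket-only name, or a nonexistent object rather than returning None.
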
googledatalab / pydatalab / datalab / storage / commands / _storage.py (view on GitHub)
def _storage_delete(args, _):
  """ Delete one or more buckets or objects. """
  items = _expand_list(args['bucket'])
  items.extend(_expand_list(args['object']))
  errs = []
  for item in items:
    try:
      bucket, key = datalab.storage._bucket.parse_name(item)
      if bucket and key:
        gcs_item = datalab.storage.Item(bucket, key)
        if gcs_item.exists():
          gcs_item.delete()
        else:
          errs.append("%s does not exist" % item)
      elif bucket:
        gcs_bucket = datalab.storage.Bucket(bucket)
        if gcs_bucket.exists():
          gcs_bucket.delete()
        else:
          errs.append("%s does not exist" % item)
      else:
        raise Exception("Can't delete item with invalid name %s" % item)
    except Exception as e:
      errs.append("Couldn't delete %s: %s" %
                  (item, _extract_storage_api_response_error(str(e))))
  if errs:
    raise Exception('\n'.join(errs))
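
A sketch of how this handler might be driven; the argument keys mirror the dict lookups above, but the names being deleted are placeholders and the second parameter is unused.

# Hypothetical invocation: 'bucket' and 'object' may each be None, a single name, or a list.
_storage_delete({'bucket': None,
                 'object': ['gs://scratch-bucket/tmp/a.txt',
                            'gs://scratch-bucket/tmp/b.txt']},
                None)

Errors are collected per item and raised once at the end, so a single bad name does not stop the rest of the batch.
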
googledatalab / pydatalab / datalab / storage / commands / _storage.py (view on GitHub)
  if names is None:
    names = []
  elif isinstance(names, basestring):  # Python 2; this would be str in Python 3
    names = [names]

  results = []  # The expanded list.
  items = {}  # Cached contents of buckets; used for matching.
  for name in names:
    bucket, key = datalab.storage._bucket.parse_name(name)
    results_len = len(results)  # If we fail to add any, we add the name and let the caller deal with it.
    if bucket:
      if not key:
        # Just a bucket; add it.
        results.append('gs://%s' % bucket)
      elif datalab.storage.Item(bucket, key).exists():
        results.append('gs://%s/%s' % (bucket, key))
      else:
        # Expand possible key values.
        if bucket not in items and key[:1] == '*':
          # We need the full list; cache a copy for efficiency.
          items[bucket] = [item.metadata.name
                           for item in list(datalab.storage.Bucket(bucket).items())]
        # If we have a cached copy use it
        if bucket in items:
          candidates = items[bucket]
        # else we have no cached copy but can use prefix matching which is more efficient than
        # getting the full contents.
        else:
          # Get the non-wildchar prefix.
          match = re.search(r'\?|\*|\[', key)
          prefix = key
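
This excerpt appears to come from the body of the _expand_list helper used by _storage_delete above, and it is cut off before the actual pattern matching. Below is a minimal sketch of the kind of matching it builds toward, using Python's fnmatch against the cached item names; the bucket and pattern are placeholders, and fnmatch is an assumption, since the original's matching code is truncated.

import fnmatch

import datalab.storage

# Hypothetical bucket and wildcard pattern.
bucket_name = 'sample-bucket'
pattern = 'logs/2016-*.txt'

# List the bucket once and match the cached names locally, mirroring the cache above.
names = [item.metadata.name for item in datalab.storage.Bucket(bucket_name).items()]
matches = ['gs://%s/%s' % (bucket_name, name)
           for name in names
           if fnmatch.fnmatch(name, pattern)]
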
googledatalab / pydatalab / datalab / storage / commands / _storage.py (view on GitHub)
def _storage_write(args, _):
  target_name = args['object']
  target_bucket, target_key = datalab.storage._bucket.parse_name(target_name)
  if target_bucket is None or target_key is None:
    raise Exception('Invalid target object name %s' % target_name)
  target = datalab.storage.Item(target_bucket, target_key)
  ipy = IPython.get_ipython()
  contents = ipy.user_ns[args['variable']]
  # TODO(gram): would we want to do any special handling here, e.g. for DataFrames?
  target.write_to(str(contents), args['content_type'])
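
The handler pulls the value out of the notebook's user namespace, stringifies it, and writes it with an explicit content type. Outside that cell-magic plumbing, the same write can be done directly; the bucket, key, and content type below are placeholders.

import datalab.storage

# Hypothetical target object; write_to takes the content string and a MIME type.
target = datalab.storage.Item('reports-bucket', 'exports/summary.csv')
target.write_to('col_a,col_b\n1,2\n', 'text/csv')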