How to use six - 10 common examples

To help you get started, we've selected ten code examples based on popular ways six is used in public projects.

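Most of the snippets below lean on a small set of six idioms: the six.PY2/six.PY3 version checks, the six.moves compatibility namespace, and six.add_metaclass. As a quick orientation, here is a minimal sketch of those helpers (ours, not taken from any of the projects below):

import abc
import six

# six.PY2 / six.PY3 are booleans for branching on the interpreter major version.
if six.PY3:
    unicode = str  # re-bind the Python 2 name to its Python 3 equivalent

# six.moves exposes renamed or relocated standard-library names under one import;
# six.moves.range is xrange on Python 2 and the built-in range on Python 3.
total = sum(i for i in six.moves.range(10))

# six.add_metaclass applies a metaclass in a way that works on both major versions.
@six.add_metaclass(abc.ABCMeta)
class Base(object):
    @abc.abstractmethod
    def run(self):
        """Subclasses must implement run()."""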

Example 1: miyosuda/TensorFlowAndroidMNIST - jni-build/jni/include/tensorflow/python/ops/data_flow_grad.py (view on GitHub)
def _DynamicStitchGrads(op, grad):
  """Gradients for DynamicStitch."""

  num_values = len(op.inputs) // 2
  indices_grad = [None] * num_values

  def AsInt32(x):
    return (x if op.inputs[0].dtype == dtypes.int32 else
            math_ops.cast(x, dtypes.int32))
  inputs = [AsInt32(op.inputs[i]) for i in xrange(num_values)]
  if isinstance(grad, ops.IndexedSlices):
    output_shape = array_ops.shape(op.outputs[0])
    output_rows = output_shape[0]
    grad = math_ops.unsorted_segment_sum(grad.values, grad.indices, output_rows)
  values_grad = [array_ops.gather(grad, inp) for inp in inputs]
  return indices_grad + values_grad
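
The xrange in the list comprehension above is not the Python 2 built-in; TensorFlow's Python sources conventionally import it from six, roughly as below. The import sits outside this excerpt, so treat it as an assumption about the surrounding file:

from six.moves import xrange  # pylint: disable=redefined-builtin
# six.moves.xrange resolves to xrange on Python 2 and to range on Python 3,
# so the comprehension over num_values works unchanged on both versions.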

Example 2: rgal/gym-2048 - gym_2048/envs/game2048_env.py (view on GitHub)
            draw.rectangle([0, 0, 4 * grid_size, 4 * grid_size], grey)
            fnt = ImageFont.truetype('Arial.ttf', 30)

            for y in range(4):
              for x in range(4):
                 o = self.get(y, x)
                 if o:
                     draw.rectangle([x * grid_size, y * grid_size, (x + 1) * grid_size, (y + 1) * grid_size], tile_colour_map[o])
                     (text_x_size, text_y_size) = draw.textsize(str(o), font=fnt)
                     draw.text((x * grid_size + (grid_size - text_x_size) // 2, y * grid_size + (grid_size - text_y_size) // 2), str(o), font=fnt, fill=white)
                     assert text_x_size < grid_size
                     assert text_y_size < grid_size

            return np.asarray(pil_board).swapaxes(0, 1)

        outfile = StringIO() if mode == 'ansi' else sys.stdout
        s = 'Score: {}\n'.format(self.score)
        s += 'Highest: {}\n'.format(self.highest())
        npa = np.array(self.Matrix)
        grid = npa.reshape((self.size, self.size))
        s += "{}\n".format(grid)
        outfile.write(s)
        return outfile
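
StringIO is defined outside this excerpt; gym environments of this era typically obtain it through six, along these lines (an assumption about the imports at the top of game2048_env.py):

from six import StringIO  # io.StringIO on Python 3, StringIO.StringIO on Python 2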

Example 3: chainer/chainer - chainer/links/caffe/caffe_function.py (view on GitHub)
        blobs = layer.blobs
        param = layer.convolution_param
        ksize = _get_ksize(param)
        stride = _get_stride(param)
        pad = _get_pad(param)
        num = _get_num(blobs[0])
        channels = _get_channels(blobs[0])

        n_in = channels * param.group
        n_out = num
        func = convolution_2d.Convolution2D(n_in, n_out, ksize, stride, pad,
                                            nobias=not param.bias_term)
        func.W.data[...] = 0

        part_size = len(blobs[0].data) // param.group
        for i in six.moves.range(param.group):
            in_slice = slice(i * n_in // param.group,
                             (i + 1) * n_in // param.group)
            out_slice = slice(i * n_out // param.group,
                              (i + 1) * n_out // param.group)
            w = func.W.data[out_slice, in_slice]

            data = numpy.array(
                blobs[0].data[i * part_size:(i + 1) * part_size])
            w[:] = data.reshape(w.shape)

        if param.bias_term:
            func.b.data[:] = blobs[1].data

        with self.init_scope():
            setattr(self, layer.name, func)
        self.forwards[layer.name] = _CallChildLink(self, layer.name)
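
The loop over param.group uses six.moves.range, which maps to xrange on Python 2 and the built-in range on Python 3, so no intermediate list is built. A one-line illustration (not from the Chainer sources):

import six
list(six.moves.range(3))  # [0, 1, 2] on both Python 2 and Python 3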

Example 4: freeipa/freeipa - ipatests/test_ipalib/test_parameters.py (view on GitHub)
import pytest

import six
from cryptography import x509 as crypto_x509
from cryptography.hazmat.backends import default_backend

from ipatests.util import raises, ClassChecker, read_only
from ipatests.util import dummy_ugettext, assert_equal
from ipatests.data import binary_bytes, utf8_bytes, unicode_str
from ipalib import parameters, text, errors, config, x509
from ipalib.constants import TYPE_ERROR, CALLABLE_ERROR
from ipalib.errors import ValidationError, ConversionError
from ipalib import _
from ipapython.dn import DN

if six.PY3:
    unicode = str
    long = int

NULLS = (None, b'', u'', tuple(), [])

pytestmark = pytest.mark.tier0


class test_DefaultFrom(ClassChecker):
    """
    Test the `ipalib.parameters.DefaultFrom` class.
    """
    _cls = parameters.DefaultFrom

    def test_init(self):
        """

Example 5: matrix-org/synapse - tests/utils.py (view on GitHub)
            mock_request.path = path.split('?')[0]
            path = mock_request.path
        except Exception:
            pass

        if isinstance(path, bytes):
            path = path.decode('utf8')

        for (method, pattern, func) in self.callbacks:
            if http_method != method:
                continue

            matcher = pattern.match(path)
            if matcher:
                try:
                    args = [urlparse.unquote(u) for u in matcher.groups()]

                    (code, response) = yield func(mock_request, *args)
                    defer.returnValue((code, response))
                except CodeMessageException as e:
                    defer.returnValue((e.code, cs_error(e.msg, code=e.errcode)))

        raise KeyError("No event can handle %s" % path)
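
urlparse in this excerpt is presumably bound through six.moves rather than the Python 2 module of the same name; the import is not shown, but it plausibly looks like this:

from six.moves.urllib import parse as urlparse
# urlparse.unquote is urllib.unquote on Python 2 and urllib.parse.unquote on Python 3.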

Example 6: getsentry/sentry - tests/sentry/api/endpoints/test_debug_files.py (view on GitHub)
        assert dsym['cpuName'] == 'any'
        assert dsym['headers'] == {
            'Content-Type': 'text/x-proguard+plain'}
        assert dsym['objectName'] == 'proguard-mapping'
        assert dsym['sha1'] == 'e6d3c5185dac63eddfdc1a5edfffa32d46103b44'
        assert dsym['symbolType'] == 'proguard'
        assert dsym['uuid'] == '6dc7fdb0-d2fb-4c8e-9d6b-bb1aa98929b1'

        # Test download
        response = self.client.get(url + "?id=" + download_id)

        assert response.status_code == 200, response.content
        assert response.get(
            'Content-Disposition') == 'attachment; filename="' + PROGUARD_UUID + '.txt"'
        assert response.get(
            'Content-Length') == text_type(len(PROGUARD_SOURCE))
        assert response.get('Content-Type') == 'application/octet-stream'
        assert PROGUARD_SOURCE == BytesIO(
            b"".join(response.streaming_content)).getvalue()

        # Login user with no permissions
        user_no_permission = self.create_user('baz@localhost', username='baz')
        self.login_as(user=user_no_permission)
        response = self.client.get(url + "?id=" + download_id)
        assert response.status_code == 403, response.content

        # Try to delete with no permissions
        response = self.client.delete(url + "?id=" + download_id)
        assert response.status_code == 403, response.content

        # Login again with permissions
        self.login_as(user=self.user)
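
text_type in the Content-Length assertion is presumably six.text_type (unicode on Python 2, str on Python 3), and BytesIO may come from io or from six, which aliases it. A minimal sketch under those assumptions:

from six import BytesIO, text_type

buf = BytesIO(b"chunk1" + b"chunk2")   # in-memory bytes buffer, as used for streaming_content
assert text_type(len(b"abc")) == u'3'  # text_type renders the length as a text string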

Example 7: arnimarj/py-pointless - tests/python_api/test_primvector.py (view on GitHub)
def testPop(self):
		w = pointless.PointlessPrimVector('u32')
		self.assertRaises(IndexError, w.pop)

		w = pointless.PointlessPrimVector('u32', sequence = six.moves.range(1000))

		self.assert_(len(w) == 1000)

		for i in six.moves.range(1000):
			n = w.pop()
			self.assert_(n == 1000 - i - 1)

		self.assert_(len(w) == 0)
		self.assertRaises(IndexError, w.pop)
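
Both calls to six.moves.range return the lazy range type of the running interpreter (xrange on Python 2, range on Python 3). Both support len() and indexing, which lets them stand in for a concrete list in many sequence-taking APIs:

import six

r = six.moves.range(1000)
assert len(r) == 1000  # sized without materializing 1000 elements
assert r[999] == 999   # indexable on both Python versions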

Example 8: chainer/chainercv - tests/links_tests/model_tests/ssd_tests/test_multibox_loss.py (view on GitHub)
        gt_mb_labels = gt_mb_labels.array

        mb_locs = cuda.to_cpu(mb_locs)
        mb_confs = cuda.to_cpu(mb_confs)
        gt_mb_locs = cuda.to_cpu(gt_mb_locs)
        gt_mb_labels = cuda.to_cpu(gt_mb_labels)
        loc_loss = cuda.to_cpu(loc_loss.array)
        conf_loss = cuda.to_cpu(conf_loss.array)

        n_positive_total = 0
        expect_loc_loss = 0
        expect_conf_loss = 0
        for i in six.moves.xrange(gt_mb_labels.shape[0]):
            n_positive = 0
            negatives = []
            for j in six.moves.xrange(gt_mb_labels.shape[1]):
                loc = F.huber_loss(
                    mb_locs[np.newaxis, i, j],
                    gt_mb_locs[np.newaxis, i, j], 1).array
                conf = F.softmax_cross_entropy(
                    mb_confs[np.newaxis, i, j],
                    gt_mb_labels[np.newaxis, i, j]).array

                if gt_mb_labels[i, j] > 0:
                    n_positive += 1
                    expect_loc_loss += loc
                    expect_conf_loss += conf
                else:
                    negatives.append(conf)

            n_positive_total += n_positive
            if n_positive > 0:

Example 9: sphinx-doc/sphinx - tests/roots/test-root/autodoc_target.py (view on GitHub)
    def __get__(self, obj, type=None):
        if obj is None:
            return self
        return 42

    def meth(self):
        """Function."""
        return "The Answer"


class CustomDataDescriptorMeta(type):
    """Descriptor metaclass docstring."""


@add_metaclass(CustomDataDescriptorMeta)
class CustomDataDescriptor2(CustomDataDescriptor):
    """Descriptor class with custom metaclass docstring."""


def _funky_classmethod(name, b, c, d, docstring=None):
    """Generates a classmethod for a class from a template by filling out
    some arguments."""
    def template(cls, a, b, c, d=4, e=5, f=6):
        return a, b, c, d, e, f
    from functools import partial
    function = partial(template, b=b, c=c, d=d)
    function.__name__ = name
    function.__doc__ = docstring
    return classmethod(function)
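
add_metaclass above is presumably imported from six (from six import add_metaclass); it rebuilds the decorated class with the given metaclass, sidestepping the incompatible Python 2 __metaclass__ attribute and Python 3 metaclass= keyword. A minimal sketch of the equivalence, with made-up names:

import six

class Meta(type):
    """Example metaclass."""

@six.add_metaclass(Meta)
class WithMeta(object):
    pass

assert type(WithMeta) is Meta  # the decorator replaced the class with one built by Meta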

Example 10: google-research/task_adaptation - task_adaptation/data/data_testing_lib.py (view on GitHub)
# See the License for the specific language governing permissions and
# limitations under the License.

"""Library for testing the dataset wrappers."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import abc
import six
from task_adaptation.data import base
import tensorflow as tf


@six.add_metaclass(abc.ABCMeta)
class BaseDataTest(tf.test.TestCase):
  """Base class for testing subclasses of base.ImageData.

  To use this testing library, subclass BaseDataTest and override setUp().
  Pass into BaseDataTest's setUp method the expected statistics for the
  specific dataset being tested. These statistics are stored as instance
  attributes to be used in the tests.

  Attributes:
    data_wrapper: Subclass of base.ImageData for testing.
    default_label_key: str, key of the default output label tensor.
    expected_num_classes: Dict with the expected number of classes for each
      output label tensor.
    expected_num_samples: Dict containing expected number of examples in the
      "train", "val", "trainval", and "test" splits of the dataset.
    required_tensors_shapes: Dictionary with the names of the tensors that