How to use numpy.random in numpy

To help you get started, we’ve selected a few numpy.random examples based on popular ways it is used in public projects.

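For a quick reference, the numpy.random calls that appear most often in the examples below are sketched here; every shape and value is an arbitrary placeholder:

import numpy as np

np.random.seed(0)                            # make the draws below reproducible
u = np.random.rand(3, 4)                     # uniform floats in [0, 1)
g = np.random.randn(3, 4)                    # standard normal floats
k = np.random.randint(0, 6, size=10)         # integers in [0, 6)
w = np.random.uniform(-0.25, 0.25, (5, 2))   # uniform floats in a custom range
s = np.random.normal(1.0, 0.25, 3)           # normal draws with given mean and std
p = np.random.dirichlet(np.full(4, 0.3))     # probability vector that sums to 1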

github ShangtongZhang / Crossprop / BasicLearner.py (View on GitHub)
        self.bias = bias
        dims[0] += int(bias[0])
        dims[1] += int(bias[1])

        # fall back to the shared stepSize when no per-matrix step size is given
        if step_size_W is None:
            step_size_W = stepSize
        if step_size_U is None:
            step_size_U = stepSize

        self.step_size_W = step_size_W
        self.step_size_U = step_size_U

        if init == 'orthogonal':
            self.U = np.matrix(orthogonalInit(dims[0], dims[1]))
        else:
            self.U = np.matrix(np.random.randn(dims[0], dims[1]))

        self.use_norm = use_norm
        self.step_size_norm = 0.0
        self.step_count = 0
        self.initial_lr = stepSize

        self.lr_decay_factor = lr_decay_factor

        self.W = np.matrix(np.random.randn(dims[1], 1))

        self.initial_W = np.copy(self.W)
        self.initial_U = np.copy(self.U)

        if activation == 'relu':
            self.act = relu
            self.gradientAct = gradientRelu
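
This excerpt initialises its weight matrices with np.random.randn (standard normal draws) whenever orthogonal initialisation is not requested. A minimal stand-alone sketch of that fallback, with made-up layer sizes:

import numpy as np

dims = (64, 32)                                    # hypothetical layer sizes
U = np.matrix(np.random.randn(dims[0], dims[1]))   # hidden weights drawn from N(0, 1)
W = np.matrix(np.random.randn(dims[1], 1))         # output weights drawn from N(0, 1)
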
github springer13 / tcl / benchmark / python / tcl167.py (View on GitHub)
import tcl
import numpy as np
import time
OKGREEN = '\033[92m'
FAIL = '\033[91m'
ENDC = '\033[0m'
a = 40
m = 40
b = 36
u = 1536
n = 40
gflops = a*m*b*u*n*2/1e9
Ma = np.random.rand(2500**2).astype('f')
Mb = np.random.rand(2500**2).astype('f')
A = np.empty((u,b,a), order='f', dtype=np.float32)
B = np.empty((u,m,n), order='f', dtype=np.float32)
C = np.empty((a,n,m,b), order='f', dtype=np.float32)
tcl.randomNumaAwareInit(A)
tcl.randomNumaAwareInit(B)
tcl.randomNumaAwareInit(C)
alpha = 1.0
beta = 0.0
timeTCL = 1e100
for i in range(5):
    Mb = Ma * 1.1 + Mb  # trash cache
    s = time.time()
    tcl.tensorMult(alpha, A, "u,b,a", B, "u,m,n", beta, C, "a,n,m,b")
    timeTCL = min(timeTCL, time.time() - s)
timeNP = 1e100
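
In this benchmark np.random.rand only builds two large scratch buffers (Ma, Mb) that are recombined between timing runs to evict the tensors from cache; the tensors themselves are filled by tcl.randomNumaAwareInit. The cache-trashing idea in isolation (buffer size chosen arbitrarily):

import numpy as np

Ma = np.random.rand(2500 ** 2).astype('f')   # roughly 25 MB of float32 scratch data
Mb = np.random.rand(2500 ** 2).astype('f')
Mb = Ma * 1.1 + Mb                           # touch both buffers so cached data is evicted
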
github ATEC2018 / mpcnn-text-similarity / train.py (View on GitHub)
Xtest = [dev_set[0], dev_set[1]]
ytest = dev_set[2]
# Xtest[0], Xtest[1], ytest = shuffle(Xtest[0], Xtest[1], ytest)


with tf.Session() as sess:
    input_1 = tf.placeholder(tf.int32, [None, SENTENCE_LENGTH], name="input_x1")
    input_2 = tf.placeholder(tf.int32, [None, SENTENCE_LENGTH], name="input_x2")
    input_3 = tf.placeholder(tf.float32, [None, NUM_CLASSES], name="input_y")
    dropout_keep_prob = tf.placeholder(tf.float32, name="dropout_keep_prob")

    # load the word2vec model
    inpH.loadW2V(WORD2VEC_MODEL, WORD2VEC_FORMAT)
    # initial matrix with random uniform
    initW = np.random.uniform(-0.25, 0.25, (len(vocab_processor.vocabulary_), EMBEDDING_DIM)).astype(np.float32)
    # print (initW)
    # print (type(initW))
    # exit(0)

    # load any vectors from the word2vec
    print("initializing initW with pre-trained word2vec embeddings")
    for index, w in enumerate(vocab_processor.vocabulary_._mapping):
        # print('vocab-{}:{}'.format(index, w))

        arr = []
        if w in inpH.pre_emb:
            arr = inpH.pre_emb[w]
            # print('=====arr-{},{}'.format(index, arr))
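
Here np.random.uniform fills the embedding matrix with small random values, and rows are then overwritten with pre-trained word2vec vectors where available. A self-contained sketch of the initialisation step, with made-up vocabulary size and embedding dimension:

import numpy as np

vocab_size, embedding_dim = 10000, 300   # hypothetical sizes
initW = np.random.uniform(-0.25, 0.25,
                          (vocab_size, embedding_dim)).astype(np.float32)
# rows for words that have pre-trained vectors would be replaced afterwards
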
github cmu-db / ottertune / server / analysis / simulation.py (View on GitHub)
def dnn(env, config, n_loops=100):
    results = []
    x_axis = []
    memory = ReplayMemory()
    num_collections = config['num_collections']
    num_samples = config['num_samples']
    ou_process = False
    Xmin = np.zeros(env.knob_dim)
    Xmax = np.ones(env.knob_dim)
    noise = OUProcess(env.knob_dim)

    for _ in range(num_collections):
        action = np.random.rand(env.knob_dim)
        reward, _ = env.simulate(action)
        memory.push(action, reward)

    for i in range(n_loops):
        X_samples = np.random.rand(num_samples, env.knob_dim)
        if i >= 10:
            actions, rewards = memory.get_all()
            tuples = tuple(zip(actions, rewards))
            top10 = heapq.nlargest(10, tuples, key=lambda e: e[1])
            for entry in top10:
                X_samples = np.vstack((X_samples, np.array(entry[0])))
        tf.reset_default_graph()
        tf.InteractiveSession()
        model_nn = NeuralNet(n_input=X_samples.shape[1],
                             batch_size=X_samples.shape[0],
                             learning_rate=0.005,
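
np.random.rand(num_samples, env.knob_dim) draws a whole batch of candidate knob settings in [0, 1) at once, and the best remembered actions are stacked onto that batch with np.vstack. The same pattern in isolation, with placeholder sizes:

import numpy as np

num_samples, knob_dim = 100, 8                       # hypothetical sizes
X_samples = np.random.rand(num_samples, knob_dim)    # random candidates in [0, 1)
best_action = np.random.rand(knob_dim)               # stand-in for a remembered action
X_samples = np.vstack((X_samples, best_action))      # shape (num_samples + 1, knob_dim)
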
github IntelAI / models / models / image_segmentation / tensorflow / 3d_unet / inference / fp32 / unet3d / augment.py (View on GitHub)
def random_scale_factor(n_dim=3, mean=1, std=0.25):
    return np.random.normal(mean, std, n_dim)
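
np.random.normal(mean, std, n_dim) returns one scale factor per axis, so a 3-D volume gets three independent draws centred on 1:

import numpy as np

scale = np.random.normal(1, 0.25, 3)   # three factors with mean 1 and std 0.25
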
github kongjiellx / AlphaZero-Renju / py / src / mcts.py (View on GitHub)
def dirichlet_noise(self, ps):
        """ Add Dirichlet noise in the root node """

        dim = (ps.shape[0],)
        new_ps = (1 - conf.dirichlet_esp) * ps + \
            conf.dirichlet_esp * np.random.dirichlet(np.full(dim, conf.dirichlet_alpha))
        return new_ps
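
np.random.dirichlet draws a random probability vector, which is blended with the prior ps so the result still sums to 1. A stand-alone sketch (the epsilon and alpha values here are placeholders, not the project's configuration):

import numpy as np

ps = np.array([0.5, 0.3, 0.2])          # prior move probabilities
eps, alpha = 0.25, 0.3                  # hypothetical noise weight and concentration
noise = np.random.dirichlet(np.full(ps.shape[0], alpha))
new_ps = (1 - eps) * ps + eps * noise   # still a valid probability vector
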
github landlab / landlab / landlab / components / vegetation_ca / CA_Veg.py (View on GitHub)
        self._cell_values = self.grid['cell']

        if (np.where(grid['cell']['VegetationType'] != 0)[0].shape[0] == 0):
            grid['cell']['VegetationType'] =                        \
                                    np.random.randint(0,6,grid.number_of_cells)

        VegType = grid['cell']['VegetationType']
        tp = np.zeros(grid.number_of_cells, dtype = int)
        tp[VegType == TREE] = np.random.randint(0,self._tpmax_tr,
                                    np.where(VegType==TREE)[0].shape)
        tp[VegType == SHRUB] = np.random.randint(0,self._tpmax_sh,
                                    np.where(VegType==SHRUB)[0].shape)
        tp[VegType == TREESEEDLING] = np.random.randint(0,self._tpmax_tr_s,
                                    np.where(VegType==TREESEEDLING)[0].shape)
        tp[VegType == SHRUBSEEDLING] = np.random.randint(0,self._tpmax_sh_s,
                                    np.where(VegType==SHRUBSEEDLING)[0].shape)
        grid['cell']['PlantAge'] = tp
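
np.random.randint(low, high, size) is used twice in this component: once to assign every cell a random vegetation type (integers 0–5) and once to give each plant a random age below its type-specific maximum. The core pattern, with made-up sizes:

import numpy as np

n_cells, tpmax = 100, 50                          # hypothetical grid size and maximum age
veg_type = np.random.randint(0, 6, n_cells)       # integers in [0, 6)
plant_age = np.random.randint(0, tpmax, n_cells)  # integers in [0, tpmax)
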
github snipsco / snips-nlu / snips_nlu / utils.py (View on GitHub)
def check_random_state(seed):
    """Turn seed into a :class:`numpy.random.RandomState` instance

    If seed is None, return the RandomState singleton used by np.random.
    If seed is an int, return a new RandomState instance seeded with seed.
    If seed is already a RandomState instance, return it.
    Otherwise raise ValueError.
    """
    # pylint: disable=W0212
    # pylint: disable=c-extension-no-member
    if seed is None or seed is np.random:
        return np.random.mtrand._rand  # pylint: disable=c-extension-no-member
    if isinstance(seed, (numbers.Integral, np.integer)):
        return np.random.RandomState(seed)
    if isinstance(seed, np.random.RandomState):
        return seed
    raise ValueError('%r cannot be used to seed a numpy.random.RandomState'
                     ' instance' % seed)
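
A typical use of this helper is to accept None, an int, or an existing RandomState and always hand back a generator, which keeps stochastic code reproducible. For example, assuming check_random_state is in scope as defined above:

rng = check_random_state(42)        # fresh RandomState seeded with 42
sample = rng.rand(5)                # reproducible draws
same_rng = check_random_state(rng)  # an existing RandomState is returned unchanged
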
github juanjogg1987 / Fully_Natural_Gradient_HetMOGP / hetmogp / util.py (View on GitHub)
def generate_toy_U(X,Q):
    arg = np.tile(X, (1,Q))
    rnd = np.tile(np.random.rand(1,Q), (X.shape))
    U = 2*rnd*np.sin(10*rnd*arg + np.random.randn(1)) + 2*rnd*np.cos(20*rnd*arg + np.random.randn(1))
    return U
github nikste / tensorflow-neat / standard_neat.py (View on GitHub)
def add_connection(connections, genotype):
    enabled_innovations = [k for k in genotype.keys() if genotype[k]]

    enabled_connections = [connections[cns] for cns in enabled_innovations]

    # get reachable nodes
    froms = set([fr[1] for fr in enabled_connections ])
    tos = set([to[2] for to in enabled_connections])

    nodes = sorted(list(froms.union(tos)))

    # select two random nodes:
    r1 = np.random.randint(0,len(nodes))
    r2 = np.random.randint(0,len(nodes) - 1)
    if r2 >= r1:
        r2 += 1

    r1 = nodes[r1]
    r2 = nodes[r2]
    from_node = r2 if r2 < r1 else r1
    to_node = r2 if r2 > r1 else r1

    assert(from_node < to_node)

    # prevent connections from input to input nodes and output to output nodes.
    # todo change this
    if from_node == INPUT0 and to_node == INPUT1 or from_node == OUTPUT0 and to_node == OUTPUT1:
        return add_connection( connections, genotype)
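
The two np.random.randint calls above draw two distinct node indices without a retry loop: the second index is drawn from a range one element smaller and shifted up by one whenever it lands at or above the first. The trick in isolation, with a placeholder node count:

import numpy as np

n = 10                    # hypothetical number of nodes
r1 = np.random.randint(0, n)
r2 = np.random.randint(0, n - 1)
if r2 >= r1:
    r2 += 1               # shift past r1, so r1 and r2 are guaranteed to differ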