numpy.random.normal - python examples

Here are the examples of the python api numpy.random.normal taken from open source projects. By voting up you can indicate which examples are most useful and appropriate.

145 Examples

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_correctness_2d():
    """BatchNormalization over axis=1 of a (timesteps, features) input
    should drive each of the 10 channels to ~zero mean / unit std."""
    # Restores the name mangled to 'astert_allclose' by the page scraper.
    from numpy.testing import assert_allclose

    model = Sequential()
    norm = normalization.BatchNormalization(axis=1, input_shape=(10, 6), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse', optimizer='rmsprop')

    # centered on 5.0, std-dev 10.0 (np.random.normal's `scale` is the
    # standard deviation, not the variance)
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 10, 6))
    model.fit(x, x, epochs=5, verbose=0)
    out = model.predict(x)
    # Undo the learned affine (beta/gamma) so only the normalization remains;
    # reshape to broadcast over the batch and feature axes.
    out -= np.reshape(K.eval(norm.beta), (1, 10, 1))
    out /= np.reshape(K.eval(norm.gamma), (1, 10, 1))

    assert_allclose(out.mean(axis=(0, 2)), 0.0, atol=1.1e-1)
    assert_allclose(out.std(axis=(0, 2)), 1.0, atol=1.1e-1)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_correctness_1d():
    """BatchNormalization on a flat (10,) feature vector should produce
    ~zero mean / unit std output after removing the learned affine."""
    # Restores the name mangled to 'astert_allclose' by the page scraper.
    from numpy.testing import assert_allclose

    model = Sequential()
    norm = normalization.BatchNormalization(input_shape=(10,), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse', optimizer='rmsprop')

    # centered on 5.0, std-dev 10.0 (np.random.normal's `scale` is the
    # standard deviation, not the variance)
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 10))
    model.fit(x, x, epochs=5, verbose=0)
    out = model.predict(x)
    # Undo the learned affine (beta/gamma) so only the normalization remains.
    out -= K.eval(norm.beta)
    out /= K.eval(norm.gamma)

    assert_allclose(out.mean(), 0.0, atol=1e-1)
    assert_allclose(out.std(), 1.0, atol=1e-1)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_correctness_1d():
    """Duplicate listing of the 1D correctness test: normalized output
    should have ~zero mean / unit std once beta/gamma are removed."""
    # Restores the name mangled to 'astert_allclose' by the page scraper.
    from numpy.testing import assert_allclose

    model = Sequential()
    norm = normalization.BatchNormalization(input_shape=(10,), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse', optimizer='rmsprop')

    # centered on 5.0, std-dev 10.0 (np.random.normal's `scale` is the
    # standard deviation, not the variance)
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 10))
    model.fit(x, x, epochs=5, verbose=0)
    out = model.predict(x)
    # Undo the learned affine (beta/gamma) so only the normalization remains.
    out -= K.eval(norm.beta)
    out /= K.eval(norm.gamma)

    assert_allclose(out.mean(), 0.0, atol=1e-1)
    assert_allclose(out.std(), 1.0, atol=1e-1)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_mode_twice():
    """Regression test for issue #4881: stacking two BatchNormalization
    layers crashed the old batch-norm code path on the Theano backend.
    Passes if fit/predict complete without raising."""
    model = Sequential()
    for _ in range(2):
        model.add(normalization.BatchNormalization(input_shape=(10, 5, 5), axis=1))
    model.compile(loss='mse', optimizer='sgd')

    data = np.random.normal(loc=5.0, scale=10.0, size=(20, 10, 5, 5))
    model.fit(data, data, epochs=1, verbose=0)
    model.predict(data)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_mode_twice():
    """Regression test for issue #4881 (old Theano-backend batch
    normalization): two stacked BatchNormalization layers must train
    and predict without error."""
    bn_kwargs = dict(input_shape=(10, 5, 5), axis=1)
    model = Sequential()
    model.add(normalization.BatchNormalization(**bn_kwargs))
    model.add(normalization.BatchNormalization(**bn_kwargs))
    model.compile(loss='mse', optimizer='sgd')

    samples = np.random.normal(loc=5.0, scale=10.0, size=(20, 10, 5, 5))
    model.fit(samples, samples, epochs=1, verbose=0)
    # Only checking that this does not raise.
    model.predict(samples)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_correctness_1d():
    """Duplicate listing of the 1D correctness test: after training,
    the normalized output (with beta/gamma removed) should be ~N(0, 1)."""
    # Restores the name mangled to 'astert_allclose' by the page scraper.
    from numpy.testing import assert_allclose

    model = Sequential()
    norm = normalization.BatchNormalization(input_shape=(10,), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse', optimizer='rmsprop')

    # centered on 5.0, std-dev 10.0 (np.random.normal's `scale` is the
    # standard deviation, not the variance)
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 10))
    model.fit(x, x, epochs=5, verbose=0)
    out = model.predict(x)
    # Undo the learned affine (beta/gamma) so only the normalization remains.
    out -= K.eval(norm.beta)
    out /= K.eval(norm.gamma)

    assert_allclose(out.mean(), 0.0, atol=1e-1)
    assert_allclose(out.std(), 1.0, atol=1e-1)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
@pytest.mark.skipif((K.backend() == 'theano'),
                    reason='Bug with theano backend')
def test_batchnorm_convnet_no_center_no_scale():
    """With center=False and scale=False there is no beta/gamma to undo:
    the raw output over the channels-last axis should already be ~N(0, 1)."""
    # Restores the name mangled to 'astert_allclose' by the page scraper.
    from numpy.testing import assert_allclose

    model = Sequential()
    norm = normalization.BatchNormalization(axis=-1, center=False, scale=False,
                                            input_shape=(3, 4, 4), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse', optimizer='sgd')

    # centered on 5.0, std-dev 10.0 (np.random.normal's `scale` is the
    # standard deviation, not the variance)
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)

    assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
    assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_convnet():
    """BatchNormalization over the channel axis (axis=1) of a 4D
    (channels-first) input should normalize each of the 3 channels."""
    # Restores the name mangled to 'astert_allclose' by the page scraper.
    from numpy.testing import assert_allclose

    model = Sequential()
    norm = normalization.BatchNormalization(axis=1, input_shape=(3, 4, 4), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse', optimizer='sgd')

    # centered on 5.0, std-dev 10.0 (np.random.normal's `scale` is the
    # standard deviation, not the variance)
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    # Undo the learned affine (beta/gamma) so only the normalization remains;
    # reshape to broadcast over batch and the two spatial axes.
    out -= np.reshape(K.eval(norm.beta), (1, 3, 1, 1))
    out /= np.reshape(K.eval(norm.gamma), (1, 3, 1, 1))

    assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
    assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_convnet():
    """Duplicate listing of the convnet test: per-channel (axis=1)
    normalization of a 4D input should yield ~N(0, 1) per channel."""
    # Restores the name mangled to 'astert_allclose' by the page scraper.
    from numpy.testing import assert_allclose

    model = Sequential()
    norm = normalization.BatchNormalization(axis=1, input_shape=(3, 4, 4), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse', optimizer='sgd')

    # centered on 5.0, std-dev 10.0 (np.random.normal's `scale` is the
    # standard deviation, not the variance)
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    # Undo the learned affine (beta/gamma) so only the normalization remains.
    out -= np.reshape(K.eval(norm.beta), (1, 3, 1, 1))
    out /= np.reshape(K.eval(norm.gamma), (1, 3, 1, 1))

    assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
    assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)

3 View Complete Implementation : normalization_test.py
Copyright MIT License
Author : hello-sea
@keras_test
def test_batchnorm_convnet():
    """Duplicate listing of the convnet test: per-channel (axis=1)
    normalization of a 4D input should yield ~N(0, 1) per channel."""
    # Restores the name mangled to 'astert_allclose' by the page scraper.
    from numpy.testing import assert_allclose

    model = Sequential()
    norm = normalization.BatchNormalization(axis=1, input_shape=(3, 4, 4), momentum=0.8)
    model.add(norm)
    model.compile(loss='mse', optimizer='sgd')

    # centered on 5.0, std-dev 10.0 (np.random.normal's `scale` is the
    # standard deviation, not the variance)
    x = np.random.normal(loc=5.0, scale=10.0, size=(1000, 3, 4, 4))
    model.fit(x, x, epochs=4, verbose=0)
    out = model.predict(x)
    # Undo the learned affine (beta/gamma) so only the normalization remains.
    out -= np.reshape(K.eval(norm.beta), (1, 3, 1, 1))
    out /= np.reshape(K.eval(norm.gamma), (1, 3, 1, 1))

    assert_allclose(np.mean(out, axis=(0, 2, 3)), 0.0, atol=1e-1)
    assert_allclose(np.std(out, axis=(0, 2, 3)), 1.0, atol=1e-1)