How to use the wandb.keras.WandbCallback function in wandb

To help you get started, we’ve selected a few wandb examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github wandb / client / edgeml_tests / test_tensorflow_keras.py View on Github external
def test_keras_log_weights(dummy_model, dummy_data, wandb_init_run):
    """Fit with log_weights=True and verify layer weights are logged as histograms."""
    weight_logging_cb = WandbCallback(data_type="image", log_weights=True)
    dummy_model.fit(
        *dummy_data,
        epochs=2,
        batch_size=36,
        validation_data=dummy_data,
        callbacks=[weight_logging_cb],
    )
    # The first history row should contain a histogram summary of the dense weights.
    first_row = wandb_init_run.history.rows[0]
    assert first_row['parameters/dense.weights']['_type'] == "histogram"
github wandb / client / edgeml_tests / test_tensorflow_keras.py View on Github external
def test_keras_image_multiclass(dummy_model, dummy_data, wandb_init_run):
    """Fit with predictions=10 and verify exactly 10 example captions are logged."""
    prediction_cb = WandbCallback(data_type="image", predictions=10)
    dummy_model.fit(
        *dummy_data,
        epochs=2,
        batch_size=36,
        validation_data=dummy_data,
        callbacks=[prediction_cb],
    )
    captions = wandb_init_run.history.rows[0]["examples"]['captions']
    assert len(captions) == 10
github lukas / ml-class / keras-transfer / dogcat-bottleneck.py View on Github external
# Pretrained VGG16 without its classifier head, used as a fixed feature extractor.
base_model = VGG16(include_top=False, weights='imagenet')
            # NOTE(review): the indented lines below are an extract from the body of a
            # callback method (the `def` line is not shown here) — do not run as-is.
            # Sample 36 random validation images and log the model's predictions
            # on them as captioned images.
            indices = np.random.randint(val_data.shape[0], size=36)
            test_data = val_data[indices]
            features = base_model.predict(np.array([preprocess_input(data) for data in test_data]))
            pred_data = model.predict(features)
            wandb.log({
                  "examples": [
                        wandb.Image(test_data[i], caption="cat" if pred_data[i] < 0.5 else "dog")
                        for i, data in enumerate(test_data)]
            }, commit=False)  # commit=False: merge into the next logged step

    # Train the top model on bottleneck features; save_model=False skips
    # uploading model checkpoints to W&B.
    model.fit(X_train, y_train,
              epochs=config.epochs,
              batch_size=config.batch_size,
              validation_data=(X_test, y_test),
              callbacks=[Images(), WandbCallback(save_model=False)])
    model.save_weights(top_model_weights_path)
github lukas / ml-class / keras-audio / gru-composer.py View on Github external
def train(model, network_input, network_output):
    """Train the neural network, checkpointing the best weights and logging to W&B."""
    best_weights_path = "mozart.hdf5"
    # Keep on disk only the weights with the lowest training loss seen so far.
    checkpointer = ModelCheckpoint(
        best_weights_path,
        monitor='loss',
        mode='min',
        save_best_only=True,
        verbose=0,
    )
    model.fit(
        network_input,
        network_output,
        epochs=200,
        batch_size=128,
        callbacks=[Midi(), wandb.keras.WandbCallback(), checkpointer],
    )
github lukas / ml-class / keras-mlp / mlp.py View on Github external
# create model
# Simple MLP: flatten image -> dropout -> hidden relu layer -> dropout -> softmax.
model=Sequential()
model.add(Flatten(input_shape=(img_width,img_height)))
model.add(Dropout(0.4))
model.add(Dense(config.hidden_nodes, activation='relu'))
model.add(Dropout(0.4))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer=config.optimizer,
                    metrics=['accuracy'])

# Fit the model
# data_type="image" makes WandbCallback log sample inputs as images in the run UI.
model.fit(X_train, y_train, validation_data=(X_test, y_test), 
      epochs=config.epochs,
      callbacks=[WandbCallback(data_type="image", labels=labels)])
github lukas / ml-class / mobile / tfjs-emotion / train.py View on Github external
# Scale pixel values into [0, 1] — assumes inputs are 0-255 images (TODO confirm).
train_faces /= 255.
val_faces /= 255.

# Define the model here, CHANGEME
# Baseline: a single softmax layer over the flattened input.
model = Sequential()
model.add(Flatten(input_shape=input_shape))
model.add(Dense(num_classes, activation="softmax"))
model.compile(optimizer='adam', loss='categorical_crossentropy',
              metrics=['accuracy'])

# log the number of total parameters
config.total_params = model.count_params()
# Perf(val_faces) is a project-local callback; WandbCallback logs example
# images labeled with the seven emotion classes below.
model.fit(train_faces, train_emotions, batch_size=config.batch_size,
          epochs=config.num_epochs, verbose=1, callbacks=[
              Perf(val_faces),
              WandbCallback(data_type="image", labels=[
                            "Angry", "Disgust", "Fear", "Happy", "Sad", "Surprise", "Neutral"])
          ], validation_data=(val_faces, val_emotions))

# save the model
model.save("emotion.h5")
github lukas / ml-class / videos / intro / perceptron-logistic.py View on Github external
# One-hot encode the integer class labels.
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
labels = range(10)

num_classes = y_train.shape[1]

# create model
# Single-layer perceptron: flatten the image, then one softmax output layer.
model=Sequential()
model.add(Flatten(input_shape=(img_width,img_height)))
model.add(Dense(num_classes, activation='softmax'))
# NOTE(review): 'mse' over softmax outputs trains, but 'categorical_crossentropy'
# is the conventional loss for this setup — likely intentional for the lesson.
model.compile(loss='mse', optimizer='adam',
                metrics=['accuracy'])

# Fit the model
model.fit(X_train, y_train, epochs=config.epochs, validation_data=(X_test, y_test),
                    callbacks=[WandbCallback(labels=labels, data_type="image")])
github lukas / ml-class / videos / intro / perceptron-single.py View on Github external
# Reduce to a binary task: is the digit a five or not.
is_five_test = y_test == 5
labels = ["Not Five", "Is Five"]

img_width = X_train.shape[1]
img_height = X_train.shape[2]

# create model
# Single linear output unit (no activation) trained with MSE — a bare perceptron.
model=Sequential()
model.add(Flatten(input_shape=(img_width,img_height)))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam',
                metrics=['accuracy'])

# Fit the model
# NOTE(review): `is_five_train` is defined outside this extract — presumably
# mirroring `is_five_test` above.
model.fit(X_train, is_five_train, epochs=config.epochs, validation_data=(X_test, is_five_test),
                    callbacks=[WandbCallback(labels=labels, data_type="image")])
github lukas / ml-class / examples / keras-cifar / cifar.py View on Github external
(X_train, y_train), (X_test, y_test) = cifar10.load_data()

# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)

# Linear baseline: flatten CIFAR images into a single dense layer.
model = Sequential()
model.add(Flatten())
model.add(Dense(num_classes))
# NOTE(review): 'mse' with a linear output layer is unusual for classification;
# presumably deliberate as a teaching baseline.
model.compile(loss='mse',
              optimizer=Adam(config.learn_rate),
              metrics=['accuracy'])

model.fit(X_train, y_train, epochs=10, batch_size=128, validation_data=(X_test, y_test), 
    callbacks=[WandbCallback(data_type="image", labels=class_names)])
github lukas / ml-class / keras-transfer / dogcat-transfer-and-finetune.py View on Github external
# setup model
base_model = InceptionV3(weights='imagenet', include_top=False) #include_top=False excludes final FC layer
model = add_new_last_layer(base_model, nb_classes)
# HACK: pokes a private Keras attribute — presumably to work around a
# save/serialization limitation; verify still needed for the pinned Keras version.
model._is_graph_network = False

# fine-tuning
setup_to_finetune(model)

# NOTE(review): steps_per_epoch uses float division (nb_train_samples * 2 / batch_size);
# Keras versions that require an int may need // here — confirm against the pinned version.
model.fit_generator(
    train_generator,
    epochs=config.epochs,
    workers=2,
    steps_per_epoch=nb_train_samples * 2 / config.batch_size,
    validation_data=validation_generator,
    validation_steps=nb_train_samples / config.batch_size,
    callbacks=[WandbCallback(data_type="image", generator=validation_generator, labels=['cat', 'dog'],save_model=False)],
    class_weight='auto')

model.save('transfered.h5')