import numpy as np
import tensorflow as tf
from tensorflow.keras.datasets import cifar10

import autokeras as ak

# `common` below refers to the surrounding test suite's utilities module; a
# minimal stand-in for it (and for the pytest `tmp_dir` fixture) is sketched
# after the first test.


def test_structured_data_input(tmp_dir):
    num_data = 500
    x_train = common.structured_data(num_data)
    y_train = np.random.randint(0, 3, num_data)
    input_node = ak.StructuredDataInput(
        column_names=common.COLUMN_NAMES_FROM_NUMPY,
        column_types=common.COLUMN_TYPES_FROM_NUMPY)
    output_node = input_node
    output_node = ak.StructuredDataBlock()(output_node)
    output_node = ak.ClassificationHead(loss='categorical_crossentropy',
                                        metrics=['accuracy'])(output_node)
    auto_model = ak.GraphAutoModel(input_node,
                                   output_node,
                                   directory=tmp_dir,
                                   max_trials=1)
    auto_model.fit(x_train, y_train, epochs=1,
                   validation_data=(x_train, y_train))
    auto_model.predict(x_train)
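

# The tests in this file rely on a pytest `tmp_dir` fixture and a `common`
# test-utilities module from the surrounding test suite, neither of which is
# shown here. The definitions below are a minimal, assumed stand-in (three
# numerical columns of random data), not the project's actual helpers.
import types

import pytest


@pytest.fixture
def tmp_dir(tmp_path):
    # Give each test its own directory so AutoKeras trial artifacts stay isolated.
    return str(tmp_path)


def _structured_data(num_data):
    return np.random.rand(num_data, 3)


common = types.SimpleNamespace(
    structured_data=_structured_data,
    COLUMN_NAMES_FROM_NUMPY=['col_0', 'col_1', 'col_2'],
    COLUMN_TYPES_FROM_NUMPY={'col_0': 'numerical',
                             'col_1': 'numerical',
                             'col_2': 'numerical'})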


def test_rnn_block(tmp_dir):
    x_train = np.random.rand(100, 32, 10)
    y_train = np.random.randint(5, size=100)
    y_train = tf.keras.utils.to_categorical(y_train)
    input_node = ak.Input()
    output_node = input_node
    output_node = ak.RNNBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    input_node.shape = (32, 10)
    output_node[0].shape = (5,)
    graph = ak.GraphAutoModel(input_node, output_node,
                              directory=tmp_dir,
                              max_trials=1)
    graph.fit(x_train, y_train,
              epochs=1,
              batch_size=100,
              verbose=False,
              validation_split=0.2)
    result = graph.predict(x_train)
    assert result.shape == (100, 5)


def test_resnet_block(tmp_dir):
    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.randint(10, size=100)
    y_train = tf.keras.utils.to_categorical(y_train)
    input_node = ak.Input()
    output_node = input_node
    output_node = ak.ResNetBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    graph = ak.GraphAutoModel(input_node, output_node,
                              directory=tmp_dir,
                              max_trials=1)
    graph.fit(x_train, y_train,
              epochs=1,
              batch_size=100,
              verbose=False,
              validation_split=0.2)
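    # Optional sanity check (not in the original snippet), mirroring
    # test_rnn_block above: one 10-class probability vector per input image.
    result = graph.predict(x_train)
    assert result.shape == (100, 10)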


def test_xception_block(tmp_dir):
    x_train = np.random.rand(100, 32, 32, 3)
    y_train = np.random.randint(10, size=100)
    y_train = tf.keras.utils.to_categorical(y_train)
    input_node = ak.Input()
    output_node = input_node
    output_node = ak.XceptionBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    input_node.shape = (32, 32, 3)
    output_node[0].shape = (10,)
    graph = ak.GraphAutoModel(input_node, output_node,
                              directory=tmp_dir,
                              max_trials=1)
    graph.fit(x_train, y_train,
              epochs=1,
              batch_size=100,
              verbose=False,
              validation_split=0.2)


def functional_api():
    (x_train, y_train), (x_test, y_test) = cifar10.load_data()
    input_node = ak.ImageInput()
    output_node = input_node
    output_node = ak.Normalization()(output_node)
    output_node = ak.ImageAugmentation()(output_node)
    output_node = ak.ResNetBlock(version='next')(output_node)
    output_node = ak.SpatialReduction()(output_node)
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    clf = ak.AutoModel(input_node, output_node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
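

def export_best_model(auto_model, path='best_model'):
    # Illustrative helper (not part of the original snippets): after a search,
    # AutoModel.export_model() returns the best tf.keras.Model found, which can
    # then be saved like any other Keras model. The `path` default is arbitrary.
    best_model = auto_model.export_model()
    best_model.save(path)
    return best_model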


# Multi-input / multi-output search: image and structured data in, regression
# and classification targets out. The fragment assumes 28x28 grayscale images
# with integer class labels; MNIST is loaded here as an assumed stand-in.
(x_train, y_classification), (x_test, y_test) = \
    tf.keras.datasets.mnist.load_data()
x_image = x_train.reshape(x_train.shape + (1,))
x_test = x_test.reshape(x_test.shape + (1,))
x_structured = np.random.rand(x_train.shape[0], 100)
y_regression = np.random.rand(x_train.shape[0], 1)
y_classification = y_classification.reshape(-1, 1)

# Build model and train.
automodel = ak.AutoModel(
    inputs=[ak.ImageInput(),
            ak.StructuredDataInput()],
    outputs=[ak.RegressionHead(metrics=['mae']),
             ak.ClassificationHead(loss='categorical_crossentropy',
                                   metrics=['accuracy'])])
automodel.fit([x_image, x_structured],
              [y_regression, y_classification],
              validation_split=0.2)
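
# Illustrative follow-up (assumed, not in the original fragment): prediction
# uses the same two-input layout that was used for training.
x_structured_test = np.random.rand(x_test.shape[0], 100)
predictions = automodel.predict([x_test, x_structured_test])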


def text_classification_example():  # wrapper name added for illustration
    max_features = 20000  # assumed vocabulary size; undefined in the source fragment
    max_words = 400
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.imdb.load_data(
        num_words=max_features,
        index_from=3)
    x_train = tf.keras.preprocessing.sequence.pad_sequences(
        x_train, maxlen=max_words)
    x_test = tf.keras.preprocessing.sequence.pad_sequences(
        x_test, maxlen=max_words)
    print(x_train.dtype)
    print(x_train[:10])
    input_node = ak.Input()
    output_node = input_node
    output_node = ak.EmbeddingBlock(max_features=max_features)(output_node)
    output_node = ak.ConvBlock()(output_node)
    output_node = ak.SpatialReduction()(output_node)
    output_node = ak.DenseBlock()(output_node)
    output_node = ak.ClassificationHead()(output_node)
    clf = ak.AutoModel(input_node, output_node, seed=5, max_trials=3)
    clf.fit(x_train, y_train, validation_split=0.2)
    return clf.evaluate(x_test, y_test)
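

# Illustrative entry point (not in the original snippets): run the two
# self-contained examples defined above.
if __name__ == '__main__':
    print('CIFAR-10 functional API result:', functional_api())
    print('IMDB text classification result:', text_classification_example())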