import dill

def dump_session(file_path):
    """Pickle the current python session to be used in the worker.

    Note: Due to the inconsistency in the first dump of dill dump_session we
    create and load the dump twice to have consistent results in the worker and
    the running session. Check: https://github.com/uqfoundation/dill/issues/195
    """
    dill.dump_session(file_path)
    dill.load_session(file_path)
    return dill.dump_session(file_path)
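A minimal round trip makes the behavior concrete (the variable name and file name here are illustrative): dill.load_session replaces the __main__ namespace with whatever dump_session saved.

import dill

counter = 41                         # illustrative module-level state
dill.dump_session("session.pkl")     # illustrative file name
counter = 0                          # mutate the session after dumping
dill.load_session("session.pkl")     # restores the dumped __main__ namespace
print(counter)                       # -> 41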
def train(self, batch_size, inputs, labels, num_epochs, learning_rate, filename):
    self.batch_size = batch_size
    self.learning_rate = learning_rate
    self.check_training_data(self.batch_size, inputs, labels)
    for j in range(num_epochs):
        i = 0
        print("== EPOCH: ", j, " ==")
        # <= terminates cleanly when len(inputs) is not an exact multiple
        # of batch_size (the original != comparison never exits in that case).
        while i + batch_size <= len(inputs):
            self.error = 0
            self.forward_pass(inputs[i:i + batch_size])
            self.calculate_error(labels[i:i + batch_size])
            self.back_pass(labels[i:i + batch_size])
            i += batch_size
            self.error /= batch_size
            print("Error: ", self.error)
        # Checkpoint the whole interpreter session after each epoch.
        dill.dump_session(filename)
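Because the checkpoint is a whole-session dump rather than just the weights, an interrupted run can in principle be resumed by reloading the file; a minimal sketch, assuming the session was saved under checkpoint.pkl and contained a network bound to the (hypothetical) name net:

import dill

# All names below come from the restored session and are assumptions here.
dill.load_session("checkpoint.pkl")   # repopulates __main__ from the dump
net.train(batch_size, inputs, labels, remaining_epochs, learning_rate,
          "checkpoint.pkl")           # hypothetical continuation call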
def persist_env(file):
    """
    Persist the entire environment to a file. This function depends on the dill package.

    :param file: file name to store the environment in
    """
    dill.dump_session(file)
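The natural counterpart, not shown in the original, would reload the file; a minimal sketch assuming the same dependency on dill:

import dill

def restore_env(file):
    """
    Restore an environment previously written by persist_env.
    (Hypothetical helper: dill.load_session repopulates __main__.)
    :param file: file name the environment was stored in
    """
    dill.load_session(file)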
def train(self, batch_size, inputs, labels, num_epochs, learning_rate, filename):
    self.batch_size = batch_size
    self.learning_rate = learning_rate
    for j in range(num_epochs):
        i = 0
        print("== EPOCH: ", j, " ==")
        while i + batch_size <= len(inputs):
            self.error = 0
            input_batch = []
            label_batch = []
            # Gather one batch element by element; a separate index keeps
            # the outer counter intact (the original reused i for both).
            for k in range(i, i + batch_size):
                input_batch.append(inputs[k])
                label_batch.append(labels[k])
            self.forward_pass(input_batch)
            self.calculate_error(label_batch)
            self.back_pass(label_batch)
            i += batch_size
            print("Error: ", self.error)
        dill.dump_session(filename)
def dump_session(file_path):
    """For internal use only; no backwards-compatibility guarantees.

    Pickle the current python session to be used in the worker.

    Note: Due to the inconsistency in the first dump of dill dump_session we
    create and load the dump twice to have consistent results in the worker and
    the running session. Check: https://github.com/uqfoundation/dill/issues/195
    """
    with _pickle_lock_unless_py2:
        dill.dump_session(file_path)
        dill.load_session(file_path)
        return dill.dump_session(file_path)
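_pickle_lock_unless_py2 is not defined in this excerpt. A plausible stand-in, assuming it is simply a process-wide threading lock that serializes dill's session calls (the name suggests a no-op context manager was substituted on Python 2):

import threading

# Assumed definition: dill's session functions mutate global state, so a
# module-level lock keeps concurrent dumps from interleaving. The original
# name hints that Python 2 builds replaced this with a do-nothing manager.
_pickle_lock_unless_py2 = threading.Lock()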
def train(self, batch_size, inputs, labels, num_epochs, learning_rate, filename):
    self.batch_size = batch_size
    self.learning_rate = learning_rate
    for j in range(num_epochs):
        i = 0
        print("== EPOCH: ", j, " ==")
        # <= avoids the non-terminating != comparison when len(inputs)
        # is not an exact multiple of batch_size.
        while i + batch_size <= len(inputs):
            self.error = 0
            self.forward_pass(inputs[i : i + batch_size])
            self.calculate_error(labels[i : i + batch_size])
            self.back_pass(labels[i : i + batch_size])
            i += batch_size
            print("Error: ", self.error)
        dill.dump_session(filename)
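dump_session snapshots everything reachable from __main__, which can be heavyweight; a runnable contrast using dill.dump and dill.load to checkpoint just one object (the Model class below is a trivial stand-in for the network above):

import dill

class Model:                                # stand-in for the network above
    def __init__(self):
        self.weights = [0.5, -0.25]

net = Model()
with open("model.pkl", "wb") as f:
    dill.dump(net, f)                       # object-level checkpoint only
with open("model.pkl", "rb") as f:
    restored = dill.load(f)
print(restored.weights)                     # -> [0.5, -0.25]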