How to use the sklearn.externals.joblib.dump function in sklearn

To help you get started, we’ve selected a few examples of sklearn.externals.joblib.dump, based on popular ways the function is used in public projects.

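Before the project snippets below, here is a minimal sketch of the usual dump/load round trip. The estimator, dataset and file name are illustrative assumptions rather than code from the listed projects; note that recent scikit-learn releases have removed sklearn.externals.joblib, so the standalone joblib package is imported directly instead.

# Minimal sketch of a joblib.dump / joblib.load round trip (illustrative only)
from sklearn.externals import joblib  # on scikit-learn >= 0.23 use: import joblib
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression

X, y = load_iris(return_X_y=True)
clf = LogisticRegression(max_iter=1000).fit(X, y)

# persist the fitted estimator; compress trades a little CPU time for a smaller file
joblib.dump(clf, 'model.pkl', compress=3)

# later (or in another process), restore it and predict
clf2 = joblib.load('model.pkl')
print(clf2.predict(X[:5]))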

github alfredfrancis / ai-chatbot-framework / core / mis.py
    target = _bulb1.values

    # setting features for prediction
    numerical_features = data[['light', 'time', 'motion']]

    # converting into numpy arrays
    features_array = numerical_features.values

    # Create linear regression object
    regr = linear_model.LinearRegression()

    # Train the model using the training sets
    regr.fit(features_array, target)

    # dump generated model to file
    joblib.dump(regr, 'mis.pkl', compress=3)

    return 'Models Generated'
github okfn-brasil / serenata-de-amor / rosie / chamber_of_deputies / classifiers / __init__.py
def load_trained_model(self, classifier):
        filename = '{}.pkl'.format(classifier.__name__.lower())
        path = os.path.join(self.data_path, filename)
        # workaround: this classifier produces a model that is
        # too large to be loaded with joblib
        if filename == 'monthlysubquotalimitclassifier.pkl':
            model = classifier()
            model.fit(self.dataset)
        else:
            if os.path.isfile(path):
                model = joblib.load(path)
            else:
                model = classifier()
                model.fit(self.dataset)
                joblib.dump(model, path)
        return model
github GuiltyNeuron / ANPR / Licence_plate_recognition / Tunisian_plates / ml.py
    clf = svm.SVC(kernel='linear', C=1, gamma=1)

    # Training
    clf.fit(x_train, y_train)

    # Testing
    y_pred = clf.predict(x_test)

    # Confusion matrix
    cm = confusion_matrix(y_test, y_pred)

    # Accuracy
    accuracy = clf.score(x_test, y_test)

    # Saving the model
    joblib.dump(clf, output_file_path)

    print("Training completed")
    print("Training Accuracy : " + str(accuracy))
    print("Confusion Matrix :")
    print(cm)
github HandsomeHans / SVM-classification-localization / 7_Hard_Negative_Mining+SVM.py
def savefeat(childDir, num_win, fd):
    fd_name = childDir.split('.')[0] + '_%d.feat' % num_win
    fd_path = os.path.join('./features/train_hnm/', fd_name)
    joblib.dump(fd, fd_path, compress=3)
github MorvanZhou / tutorials / sklearnTUT / sk11_save.py
clf.fit(X, y)

# method 1: pickle
import pickle
# save
with open('save/clf.pickle', 'wb') as f:
    pickle.dump(clf, f)
# restore
with open('save/clf.pickle', 'rb') as f:
    clf2 = pickle.load(f)
    print(clf2.predict(X[0:1]))

# method 2: joblib
from sklearn.externals import joblib
# Save
joblib.dump(clf, 'save/clf.pkl')
# restore
clf3 = joblib.load('save/clf.pkl')
print(clf3.predict(X[0:1]))
github feranick / SpectralMachine / Archive / 20170808a / SpectraLearnPredict.py
        raise ValueError('  Force retraining SVM model')
    except:
        #**********************************************
        ''' Retrain training model if not available'''
        #**********************************************
        print('  Retraining SVM data...')
        clf = svm.SVC(C=svmDef.Cfactor, decision_function_shape='ovr', probability=True)
        
        print("  Training on the full training dataset\n")
        clf.fit(A,Cl)
        accur = clf.score(A_test,Cl_test)
        print('  Mean accuracy: ',100*accur,'%')

        Z = clf.decision_function(A)
        print('\n  Number of classes = ' + str(Z.shape[1]))
        joblib.dump(clf, svmTrainedData)
        if svmDef.showClasses == True:
            print('  List of classes: ' + str(clf.classes_))

    print('\n==========================================================================\n')
    return clf
github gatapia / py_ml_utils / pml / misc.py
def dump(file, data, force=False):
  if file.endswith('.pickle.gz'):
    if os.path.isfile(file) and not force: raise Exception('file: ' + file + ' already exists. Set force=True to overwrite.')
    with gzip.open(file, 'wb') as f:
      pickle.dump(data, f, protocol=pickle.HIGHEST_PROTOCOL)
      return

  if '/' not in file:
    if not os.path.isdir('data/pickles'): os.makedirs('data/pickles')
    file = 'data/pickles/' + file
  if '.' not in file: file += '.pickle'
  if os.path.isfile(file) and not force: raise Exception('file: ' + file + ' already exists. Set force=True to overwrite.')
  sklearn.externals.joblib.dump(data, file)
github codedecde / Luna2016-Lung-Nodule-Detection / UNET / Code / LUNA_unet.py
def on_epoch_end(self, epochs, logs = {}):
        cur_weights = self.model.get_weights()
        joblib.dump(cur_weights,
                    self.options.save_prefix + '_script_on_epoch_' + str(epochs) +
                    '_lr_' + str(self.options.lr) +
                    '_WITH_STRIDES_' + str(self.options.stride) +
                    '_FILTER_WIDTH_' + str(self.options.filter_width) + '.weights')
github thundergolfer / Insults / insults / util.py
def save_model(clf, location=SAVEFILE_LOCATION):
    save_path = os.path.join(os.path.dirname(__file__), location)
    make_sure_path_exists(save_path)
    save_path = os.path.join(save_path, MODEL_FILENAME)
    # joblib writes binary data, so open the target file in binary mode
    with open(save_path, 'wb') as fh:
        _ = joblib.dump(clf, fh, compress=9)