How to use the h2o.init function in h2o

To help you get started, we’ve selected a few h2o.init examples, based on popular ways it is used in public projects.

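All of the snippets on this page come from the h2o-2 Python test harness, where h2o.init(node_count, ...) builds a local cloud of one or more H2O JVMs, usually from a unittest setUpClass hook. As a starting point, here is a minimal sketch of that pattern; the class name, node count, and heap size are illustrative assumptions rather than code taken from any of the projects below.

import unittest
import h2o  # the h2o-2 test-harness module (py/h2o.py) used by the snippets below

class TestH2OInit(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # illustrative values: build a 2-node local cloud with a 2 GB Java heap per JVM
        h2o.init(2, java_heap_GB=2)

    @classmethod
    def tearDownClass(cls):
        # shut the cloud down once the test class is finished
        h2o.tear_down_cloud()

    def test_cloud_is_up(self):
        # sanity check that the expected number of nodes joined the cloud
        h2o.verify_cloud_size()

if __name__ == '__main__':
    unittest.main()

The project examples below follow the same shape, varying only the node count, heap size, and extra keyword arguments passed to h2o.init.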

From h2oai/h2o-2: py/testdir_multi_jvm/test_poisson_covtype20x.py
    @classmethod
    def setUpClass(cls):
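        # build a local cloud of 3 H2O JVMs, each with a 4 GB Java heap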
        h2o.init(3, java_heap_GB=4)
From h2oai/h2o-2: py/testdir_multi_jvm/test_many_fp_formats_libsvm_fvec.py
    @classmethod
    def setUpClass(cls):
        global SEED
        SEED = h2o.setup_random_seed()
        h2o.init(2, java_heap_GB=5)
From h2oai/h2o-2: py/testdir_ec2_only/test_GLM_covtype20x_s3n_thru_hdfs.py
    @classmethod
    def setUpClass(cls):
        # assume we're at 0xdata with its hdfs namenode
        h2o.init(1)
From h2oai/h2o-2: py/testdir_single_jvm/test_exec2_quantile_bin_defeat.py
    @classmethod
    def setUpClass(cls):
        global SEED
        SEED = h2o.setup_random_seed()
        h2o.init(1, java_heap_GB=1)
From h2oai/h2o-2: py/testdir_multi_jvm/test_KMeans_sphere3_fvec.py
    @classmethod
    def setUpClass(cls):
        global SEED
        SEED = h2o.setup_random_seed()
        h2o.init(3)
From h2oai/h2o-2: py/testdir_ec2_slow/test_parse_nflx_loop_s3n_hdfs.py
        for i, (csvFolderList, csvFilepattern, csvFilename, totalBytes, timeoutSecs) in enumerate(csvFilenameList):

            bucket = "home-0xdiag-datasets"
            ## for tryHeap in [54, 28]:
            h2oPerNode = 1
            # h1.4xlarge 60.5GB dram
            for tryHeap in [28]:
                if USE_S3:
                    protocol = "s3"
                else:
                    protocol = "s3n"
                print "\n", tryHeap,"GB heap,", h2oPerNode, "jvm per host, import", protocol, "then parse"
                
                # jea = "-XX:+UseParNewGC -XX:+UseConcMarkSweepGC"
                # jea = "-Dh2o.find-ByteBuffer-leaks=true"
                h2o.init(h2oPerNode, java_heap_GB=tryHeap, enable_benchmark_log=True, timeoutSecs=120, retryDelaySecs=10)
                # java_extra_args=jea,

                # don't raise an exception if we find something bad in h2o stdout/stderr
                h2o.nodes[0].sandboxIgnoreErrors = True

                for trial in range(trialMax):
                    # import a list of folders, one at a time (hdfs import can't take a pattern match)
                    # we want to be able to parse 800 files, but there are only 200 per folder,
                    # and importing the full bucket would be too slow
                    for csvFolder in csvFolderList:
                        # since we delete the key, we have to re-import every iteration, to get it again
                        # s3n URI thru HDFS is not typical.
                        if USE_S3:
                            (importResult, importPattern) = h2i.import_only(
                                bucket=bucket, path=csvFolder + "/" + csvFilepattern, schema='s3')
                        else:
From h2oai/h2o-2: py/testdir_single_jvm/test_summary2_percentile.py
    @classmethod
    def setUpClass(cls):
        global SEED
        SEED = h2o.setup_random_seed()
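        # no arguments: use the harness defaults for node count and Java heap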
        h2o.init()
From h2oai/h2o-2: py/testdir_multi_jvm/test_cloud_5_loop.py
    def test_Cloud(self):
        # FIX! weird timeout H2O exceptions with >8 nodes? maybe it shouldn't matter;
        # don't know if we care
        ports_per_node = 2
        tryNodes = 5
        for trial in range(10):
            h2o.verboseprint("Trying cloud of", tryNodes)
            sys.stdout.write('.')
            sys.stdout.flush()

            start = time.time()
            h2o.init(tryNodes, retryDelaySecs=2, timeoutSecs=max(30,10*tryNodes), java_heap_GB=1)
            print "trial #%d: Build cloud of %d in %d secs" % (trial, tryNodes, (time.time() - start))

            h2o.verify_cloud_size()
            time.sleep(5)
            h2o.tear_down_cloud()
if __name__ == '__main__':