How to use the mpi4py.MPI.COMM_WORLD.recv function in mpi4py

To help you get started, we’ve selected a few mpi4py examples that show popular ways MPI.COMM_WORLD.recv is used in public projects.
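Before looking at the project snippets below, here is a minimal, self-contained sketch of the basic pattern: one rank sends a pickled Python object with send, another blocks on MPI.COMM_WORLD.recv and reads the actual sender and tag from an MPI.Status object. The payload and tag values are arbitrary; run it with something like mpiexec -n 2 python example.py (the script name is just an example).

from mpi4py import MPI

comm = MPI.COMM_WORLD
rank = comm.Get_rank()

if rank == 0:
    # send() pickles any Python object; dest and tag address the message
    comm.send({'stn_id': 'station_01', 'tair_var': 'tmin'}, dest=1, tag=1)
elif rank == 1:
    status = MPI.Status()
    # recv() blocks until a matching message arrives; ANY_SOURCE/ANY_TAG match
    # anything, and status records which rank and tag actually arrived
    msg = comm.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG, status=status)
    print(msg, status.Get_source(), status.Get_tag())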


github jaredwo / topowx / twx / infill / mpi_infill_optim_tair.py View on Github external
print "SNOTEL/RAWS Coord: Done initialization. Starting to send work."
    
    cnt = 0
    nrec = 0
    
    for stn_id in fnl_stn_ids:
        
        for min_ngh in params[P_NGH_RNG]:
            
            for tair_var in ['tmin','tmax']:
                    
                if cnt < nwrkers:
                    dest = cnt+N_NON_WRKRS
                else:
                    dest = MPI.COMM_WORLD.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG)
                    nrec+=1

                MPI.COMM_WORLD.send((stn_id,min_ngh,tair_var), dest=dest, tag=TAG_DOWORK)
                cnt+=1
    
    for w in np.arange(nwrkers):
        MPI.COMM_WORLD.send((None,None,None), dest=w+N_NON_WRKRS, tag=TAG_STOPWORK)
        
    print "coord_proc: done"
github Lab-Work / taxisim / mpi_parallel / LoadBalancedProcessTree.py View on Github external
def chunk_recv(source, unpickle_it=True):
    chunks = []
    status = MPI.Status()
    # Keep receiving messages until [[MSG_OVER]] is received
    while(True):
        msg = MPI.COMM_WORLD.recv(source=source, status=status)
        
        # If we are listening to ANY_SOURCE, receive the remainder of messages
        # from the SAME source as the first message (prevent interleaving)
        if(source==MPI.ANY_SOURCE):
            source = status.Get_source()
        # print ("----- %d received msg of size %d" % (MPI.COMM_WORLD.Get_rank(), len(msg)))
        
        # If the special [[MSG_OVER]] string is received, we are done
        if(msg=="[[MSG_OVER]]"):
            break
        
        # Otherwise, add the string to the list of received strings
        chunks.append(msg)
    
    # Concatenate the strings, then unpickle
    pickled_obj = "".join(chunks)
    if(unpickle_it):
        return pickle.loads(pickled_obj)
    else:
        return pickled_obj
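chunk_recv only makes sense together with a sender that pickles an object, splits it into pieces, and finishes with the "[[MSG_OVER]]" sentinel. The sending side is not shown above; a sketch under those assumptions (chunk_send and the chunk size are illustrative, and, like the snippet, it relies on Python 2 string semantics where pickled data and the sentinel are both plain str) could be:

import pickle

def chunk_send(obj, dest, chunk_size=10 * 1024 * 1024):
    # Pickle once, then stream fixed-size pieces to the receiver
    pickled_obj = pickle.dumps(obj)
    for i in range(0, len(pickled_obj), chunk_size):
        MPI.COMM_WORLD.send(pickled_obj[i:i + chunk_size], dest=dest)
    # The sentinel tells chunk_recv that the object is complete
    MPI.COMM_WORLD.send("[[MSG_OVER]]", dest=dest)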
github jaredwo / topowx / twx / interp / mpi_optim_params_tairanom.py View on Github external
def proc_work(params,rank):
    
    status = MPI.Status()
    
    optim = XvalTairAnom(params[P_PATH_DB], params[P_VARNAME])
        
    bcast_msg = None
    bcast_msg = MPI.COMM_WORLD.bcast(bcast_msg, root=RANK_COORD)    
    print "".join(["Worker ",str(rank),": Received broadcast msg"])
    
    while 1:
    
        stn_id = MPI.COMM_WORLD.recv(source=RANK_COORD,tag=MPI.ANY_TAG,status=status)
        
        if status.tag == TAG_STOPWORK:
            MPI.COMM_WORLD.send([None]*4, dest=RANK_WRITE, tag=TAG_STOPWORK)
            print "".join(["Worker ",str(rank),": Finished"]) 
            return 0
        else:
            
            try:
                
                bias,mae,r2 = optim.run_xval(stn_id, params[P_NGH_RNG])
                                            
            except Exception as e:
            
                print "".join(["ERROR: Worker ",str(rank),": could not xval ",stn_id,"...",str(e)])
                
                mae = np.ones((params[P_NGH_RNG].size,12))*netCDF4.default_fillvals['f8']
github jaredwo / topowx / scripts / step21_mpi_optim_nstns_norms.py View on Github external
ttl_xval_stns += stnids_climdiv.size
    
    print "WRITER: Output NCDF files created"
    
    stn_idxs = {}
    for x in np.arange(stns.size):
        stn_idxs[stns[STN_ID][x]] = x
            
    ttl_xvals = ttl_xval_stns
    
    stat_chk = StatusCheck(ttl_xvals, 10)
    
    while 1:
       
        stn_id, err = MPI.COMM_WORLD.recv(source=MPI.ANY_SOURCE,
                                         tag=MPI.ANY_TAG, status=status)
        
        if status.tag == TAG_STOPWORK:
            
            nwrkrs_done += 1
            if nwrkrs_done == nwrkers:
                
                
                ######################################################
                print "WRITER: Setting the optim # of nghs..."
                                
                set_optim_nstns_tair_norm(stn_da, path_out_optim)
            
                ######################################################
                
                print "WRITER: Finished"
github jaredwo / topowx / twx / interp / mpi_xval_po.py View on Github external
stn_idxs = {}
    for x in np.arange(stn_ids.size):
        stn_idxs[stn_ids[x]] = x
    
    ngh_idxs = {}
    for x in np.arange(params[P_NGH_RNG].size):
        ngh_idxs[params[P_NGH_RNG][x]] = x
    
    ttl_xvals = params[P_NGH_RNG].size * stn_ids.size
    
    stat_chk = StatusCheck(ttl_xvals,1000)
    
    while 1:
       
        stn_id,min_ngh,hss = MPI.COMM_WORLD.recv(source=MPI.ANY_SOURCE,tag=MPI.ANY_TAG,status=status)
        if status.tag == TAG_STOPWORK:
            
            nwrkrs_done+=1
            if nwrkrs_done == nwrkers:
                print "Writer: Finished"
                return 0
        else:
            
            dim1 = ngh_idxs[min_ngh]
            dim2 = stn_idxs[stn_id]
            
            ds.variables['hss'][dim1,dim2] = hss
            ds.sync()
            
            #print "|".join(["WRITER",stn_id,str(min_ngh),"%.4f"%(hss,)])
github jaredwo / topowx / twx / infill / mpi_infill_optim_tair_normals.py View on Github external
print "SNOTEL/RAWS Coord: Done initialization. Starting to send work."
    
    cnt = 0
    nrec = 0
    
    for stn_id in fnl_stn_ids:
        
        for min_ngh in params[P_NGH_RNG]:
            
            for tair_var in ['tmin','tmax']:
                    
                if cnt < nwrkers:
                    dest = cnt+N_NON_WRKRS
                else:
                    dest = MPI.COMM_WORLD.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG)
                    nrec+=1

                MPI.COMM_WORLD.send((stn_id,min_ngh,tair_var), dest=dest, tag=TAG_DOWORK)
                cnt+=1
    
    for w in np.arange(nwrkers):
        MPI.COMM_WORLD.send((None,None,None), dest=w+N_NON_WRKRS, tag=TAG_STOPWORK)
        
    print "coord_proc: done"
github jaredwo / topowx / twx / infill / mpi_infill_prcp.py View on Github external
ds_prcp = Dataset("".join([params[P_PATH_OUT],'infill_prcp.nc']),'r+')
        ttl_infills = stnids_prcp.size
        stnids_prcp = np.array(ds_prcp.variables['stn_id'][:], dtype="<S16")  # dtype string was cut off in the source listing; "<S16" assumed
github jaredwo / topowx / scripts / step08_mpi_qa_stn_obs.py View on Github external
def proc_coord(twx_cfg, mask_stns, nwrkers):

    stndb = StationDataDb(twx_cfg.fpath_stndata_nc_all)
    stns = stndb.stns[mask_stns]

    cnt = 0
    nrec = 0

    for stn in stns:

        if cnt < nwrkers:
            dest = cnt + N_NON_WRKRS
        else:
            dest = MPI.COMM_WORLD.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG)
            nrec += 1

        MPI.COMM_WORLD.send(stn[STN_ID], dest=dest, tag=TAG_DOWORK)
        cnt += 1

    for w in np.arange(nwrkers):
        MPI.COMM_WORLD.send(stn[STN_ID], dest=w + N_NON_WRKRS, tag=TAG_STOPWORK)
github jaredwo / topowx / twx / interp / mpi_interp_tair.py View on Github external
MPI.COMM_WORLD.bcast(atiler.build_tile_grid_info(), root=RANK_COORD)
    print "COORD: Starting to send work chunks to workers..."
    
    cnt = 0
    
    try:
    
        while 1:
            
            tile_num,wrk_chk = atiler.next()
            
            if cnt < nwrkers:
                dest = cnt+N_NON_WRKRS
            else:
                dest = MPI.COMM_WORLD.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG)
            
            cnt+=1
            
            MPI.COMM_WORLD.Send([wrk_chk,MPI.DOUBLE], dest=dest, tag=tile_num)
    
    except StopIteration:
        pass
        
    for w in np.arange(nwrkers):
        MPI.COMM_WORLD.Send([wrk_chk,MPI.DOUBLE], dest=w+N_NON_WRKRS, tag=TAG_STOPWORK)
    print "coord_proc: done"
github jaredwo / topowx / twx / interp / mpi_xval_tair_overall2.py View on Github external
stns = stn_da.stns[stn_mask]
    stn_da.ds.close()
    stn_da = None
    ds = Dataset(params[P_PATH_WRITEDB],'r+')
    
    mths = np.arange(12)
    
    mthNames = []
    for mth in mths:
        mthNames.append(get_norm_varname(mth+1))
    
    stat_chk = StatusCheck(stns.size,250)
    
    while 1:
       
        stn_id,tair_daily,tair_norms = MPI.COMM_WORLD.recv(source=MPI.ANY_SOURCE,tag=MPI.ANY_TAG,status=status)
        
        if status.tag == TAG_STOPWORK:
            
            nwrkrs_done+=1
            if nwrkrs_done == nwrkers:
                print "Writer: Finished"
                return 0
        else:
            
            x = np.nonzero(stn_ids==stn_id)[0][0]
            ds.variables[params[P_VARNAME]][:,x] = tair_daily
            
            for i in mths:
                ds.variables[mthNames[i]][x] = tair_norms[i]
            
            ds.sync()
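Several of the snippets above (step21_mpi_optim_nstns_norms.py, mpi_xval_po.py, mpi_xval_tair_overall2.py) share the same writer pattern: receive from MPI.ANY_SOURCE, treat TAG_STOPWORK as a worker checking out, and keep looping until every worker has done so. Reduced to its essentials, and with handle_result as a placeholder for the dataset writes, the loop looks roughly like this:

def writer_proc(nwrkers):
    status = MPI.Status()
    nwrkrs_done = 0
    while True:
        # Accept a result from any worker; the tag distinguishes work from shutdown
        result = MPI.COMM_WORLD.recv(source=MPI.ANY_SOURCE, tag=MPI.ANY_TAG,
                                     status=status)
        if status.tag == TAG_STOPWORK:
            nwrkrs_done += 1
            if nwrkrs_done == nwrkers:
                return  # every worker has checked out
        else:
            handle_result(result)  # placeholder for writing to the output file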