How to use the pyxrf.model.fileio.read_hdf_APS function in pyxrf

To help you get started, we've selected a few pyxrf examples based on popular ways read_hdf_APS is used in public projects.
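For orientation, here is a minimal sketch of a call to read_hdf_APS, assembled from the calls that appear in the snippets below. The directory and file name are placeholders, and keyword defaults may differ between PyXRF versions.

from pyxrf.model.fileio import read_hdf_APS

# Placeholders for illustration
working_directory = "/path/to/data"   # directory containing the HDF5 file
file_name = "scan2D_12345.h5"         # hypothetical PyXRF data file

# The function returns three dictionaries (names follow the examples below):
#   img_dict  - image (map) data
#   data_sets - spectral datasets keyed by the file prefix, e.g. '<prefix>_sum'
#   mdata     - scan metadata, e.g. 'scan_id'
img_dict, data_sets, mdata = read_hdf_APS(working_directory, file_name,
                                          load_each_channel=False)

# Summed spectrum, accessed as in the first example below
# (that example also falls back to the bare prefix if the '_sum' key is absent)
data_all_sum = data_sets[file_name.split('.')[0] + '_sum'].raw_data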


Example from NSLS-II/PyXRF: pyxrf/model/command_tools.py (view on GitHub)
            if os.path.isabs(fln):
                f = fln
            else:
                f = os.path.join(working_directory, fln)
            try:
                param_quant_analysis.load_entry(f)
                quant_norm = True
                logger.info(f"Quantitative calibration is loaded successfully from file '{f}'")
            except Exception as ex:
                logger.error(f"Error occurred while loading quantitative calibration from file '{f}': {ex}")

    t0 = time.time()
    prefix_fname = file_name.split('.')[0]
    if fit_channel_sum is True:
        if data_from == 'NSLS-II':
            img_dict, data_sets, mdata = read_hdf_APS(working_directory, file_name,
                                                      spectrum_cut=spectrum_cut,
                                                      load_each_channel=False)
        elif data_from == '2IDE-APS':
            img_dict, data_sets, mdata = read_MAPS(working_directory,
                                                   file_name, channel_num=1)
        else:
            print('Unknown data sets.')

        try:
            # The summed dataset is stored under '<prefix>_sum'; fall back to the bare prefix if that key is absent
            data_all_sum = data_sets[prefix_fname+'_sum'].raw_data
        except KeyError:
            data_all_sum = data_sets[prefix_fname].raw_data

        # load param file
        if not os.path.isabs(param_file_name):
            param_path = os.path.join(working_directory, param_file_name)
Example from NSLS-II/PyXRF: pyxrf/xanes_maps/xanes_maps_api.py (view on GitHub)
        indicates if fit results should be loaded. If set to False, then only metadata
        is loaded and output dictionary ``scan_img_dict`` is empty.
    """

    # The list of file names
    files_h5 = [fl.name for fl in os.scandir(path=wd_xrf) if fl.name.lower().endswith(".h5")]
    # Sorting file names will make loading a little more orderly, but generally file names
    #   can be arbitrary (scan ID is not extracted from file names)
    files_h5.sort()

    scan_ids = []
    scan_energies = []
    scan_img_dict = []
    for fln in files_h5:
        img_dict, _, mdata = \
            read_hdf_APS(working_directory=wd_xrf, file_name=fln, load_summed_data=load_fit_results,
                         load_each_channel=False, load_processed_each_channel=False,
                         load_raw_data=False, load_fit_results=load_fit_results,
                         load_roi_results=False)

        if "scan_id" not in mdata:
            logger.error(f"Metadata value 'scan_id' is missing in data file '{fln}': "
                         " the file was not loaded.")
            continue

        # Make sure that the scan ID is in the specified range (if the range is
        #   not specified, then process all data files)
        if (start_id is not None) and (mdata["scan_id"] < start_id) or \
                (end_id is not None) and (mdata["scan_id"] > end_id):
            continue

        if "instrument_mono_incident_energy" not in mdata:
Example from NSLS-II/PyXRF: pyxrf/model/command_tools.py (view on GitHub)
        if save_tiff is True:
            output_folder = 'output_tiff_'+prefix_fname
            output_path = os.path.join(working_directory, output_folder)
            output_data(output_dir=output_path,
                        interpolate_to_uniform_grid=interpolate_to_uniform_grid,
                        dataset_name="dataset_fit",  # Sum of all detectors: should end with '_fit'
                        quant_norm=quant_norm,
                        param_quant_analysis=param_quant_analysis,
                        dataset_dict=dataset, positions_dict=positions_dict,
                        file_format="tiff", scaler_name=scaler_name,
                        scaler_name_list=scaler_name_list,
                        use_average=use_average)

    if fit_channel_each:
        # Load the data again, this time including the individual detector channels
        img_dict, data_sets, mdata = read_hdf_APS(working_directory, file_name,
                                                  spectrum_cut=spectrum_cut,
                                                  load_each_channel=True)

        # Find the detector channels and the names of the channels
        det_channels = [_ for _ in data_sets.keys() if re.search(r"_det\d+$", _)]
        det_channel_names = [re.search(r"det\d+$", _)[0] for _ in det_channels]
        if param_channel_list is None:
            param_channel_list = [param_file_name] * 3
        else:
            if not isinstance(param_channel_list, list) and not isinstance(param_channel_list, tuple):
                raise RuntimeError("Parameter 'param_channel_list' must be a list or a tuple of strings")
            if len(param_channel_list) != len(det_channels):
                raise RuntimeError(f"Parameter 'param_channel_list' must be 'None' "
                                   f"or contain {len(det_channels)} file names.")

        channel_num = len(param_channel_list)
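For per-channel processing, the same function is called with load_each_channel=True, and the individual detector datasets are then picked out of data_sets by their key suffix, as the example above does. A condensed sketch with placeholder names:

import re
from pyxrf.model.fileio import read_hdf_APS

# Placeholders for illustration
working_directory = "/path/to/data"
file_name = "scan2D_12345.h5"

img_dict, data_sets, mdata = read_hdf_APS(working_directory, file_name,
                                          load_each_channel=True)

# Individual detector channels are stored under keys ending in '_det<N>'
det_channels = [k for k in data_sets if re.search(r"_det\d+$", k)]
det_channel_names = [re.search(r"det\d+$", k)[0] for k in det_channels]
print(det_channel_names)  # e.g. ['det1', 'det2', 'det3']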