Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# --- Fragment (review note): this chunk begins mid-method; the enclosing
# `def` (apparently a database `add()` method handling a Spectrum object or
# a file path) is outside the visible region, and indentation has been
# flattened. Code left byte-identical; comments only.
# NOTE(review): PEP 8 — `store_name == None` should be `store_name is None`.
if store_name == None:
# Fall back to the database-level defaults unless the caller overrode them.
if not 'add_info' in kwargs:
kwargs['add_info'] = self.add_info
if not 'add_date' in kwargs:
kwargs['add_date'] = self.add_date
# Store under an auto-generated name inside the database folder.
file = spectrum.store(self.path, compress=compress, **kwargs)
else:
# Store under the explicit name requested by the caller.
file = spectrum.store(self.path + '/' + str(store_name), compress=compress, **kwargs)
# Note we could have added the Spectrum directly
# (saves the load stage) but it also serves to
# check the file we just stored is readable
# ... input is a file name. Copy it in database and load it
elif isinstance(spectrum, string_types):
if not exists(spectrum):
raise FileNotFoundError(
'File doesnt exist: {0}'.format(spectrum))
# Split into directory and base name to compare against the database folder.
fd, name = split(spectrum)
# Assert a similar case name is not in database already
if spectrum in list(self.df['file']):
raise ValueError('File already in database: {0}. Database filenames should be unique'.format(
spectrum))
# Only copy when the file lives outside the database directory.
if abspath(fd) != abspath(self.path):
# Assert file doesnt exist in database already
if name in os.listdir(self.path):
raise ValueError('File already in database folder: {0}'.format(name) +
'. Use db.update() if you added it there manually')
# Ok. Copy it.
file = join(self.path, name)
def _reload_databank(self):
    '''Regenerate the reference dataframe ``df0`` from the temporary file.

    In save_memory mode we're trying to save RAM so the reference dataframe
    (df0) is deleted once the scaled database (df1) has been built. That
    makes it impossible to calculate another spectrum afterwards without
    reloading the database: the temporary file kept around for that purpose
    is re-read here to rebuild df0.

    Raises
    ------
    FileNotFoundError
        If the temporary file has already been removed.
    '''
    temp_path = self._get_temp_file()
    # Guard clause: without the temp file there is nothing to reload from.
    if not exists(temp_path):
        raise FileNotFoundError(
            'temp file not present. Cant reload database')
    start = time()
    # Same loading parameters as the original load, but read straight from
    # the single temporary file with a direct buffer.
    self.df0 = self._load_databank(
        [temp_path],
        self.params.dbformat,
        self.params.levelsfmt,
        db_use_cached=self.params.db_use_cached,
        db_assumed_sorted=self.db_assumed_sorted,
        buffer='direct')
    if __debug__:
        printdbg(
            'Databank reloaded from temporary h5 file in {0:.2f}s'.format(time() - start))
# --- Fragment (review note): tail of a databank-parameter validation
# routine; its `def` and the preceding format checks are outside the
# visible region and indentation has been flattened. Code left
# byte-identical; comments only.
raise ValueError('Database format ({0}) not in known list: {1}'.format(
dbformat, KNOWN_DBFORMAT))
# Energy-level format is only checked when level files were supplied.
if levels is not None and levelsfmt not in KNOWN_LVLFORMAT:
raise ValueError('Energy level format ({0}) not in known list: {1}'.format(
levelsfmt, KNOWN_LVLFORMAT))
# None means "use default/tabulated partition functions".
if parfuncfmt not in [None]+KNOWN_PARFUNCFORMAT:
raise ValueError('Partition function format ({0}) not in known list: {1}'.format(
parfuncfmt, KNOWN_PARFUNCFORMAT))
# Check all path exists
# ... remove empty paths first
path = [p for p in path if p != '']
# ... test paths
for p in path:
if not exists(p):
raise FileNotFoundError('databank lines file: `{0}`'.format(p))
if levels is not None:
for iso, lvl in levels.items(): # one file per isotope
if not exists(lvl):
raise FileNotFoundError('levels = `{0}`'.format(lvl))
if parfunc is not None: # all isotopes in same file?
if not exists(parfunc):
raise FileNotFoundError('parfunc = `{0}`'.format(parfunc))
# Get cache
# Fall back to the instance-level cache policy when not given explicitly.
if db_use_cached is None:
db_use_cached = self.params.db_use_cached
# Return the fully-validated parameter tuple for the caller to unpack.
return (name, path, dbformat, parfunc, parfuncfmt, levels, levelsfmt,
db_use_cached, db_assumed_sorted, drop_columns, buffer, load_energies)
def addDatabankEntries(dbname, dict_entries, verbose=True):
''' Add database dbname with entries from dict_entries. If database
already exists in ~/.radis, raises an error

Parameters
----------
dbname: str
    name of the databank section to create in the ~/.radis config file
dict_entries: dict
    entries to write under the new section
verbose: bool
    if True, print a message when ~/.radis is first created

NOTE(review): this definition is truncated in this chunk — the body of the
final ``if 'info' in dict_entries:`` branch (and whatever follows) is not
visible here.
'''
# Get ~/.radis if exists, else create it
try:
dbnames = getDatabankList()
except FileNotFoundError:
# generate ~/.radis:
# No config file yet: start from an empty list and touch the file.
dbnames = []
open(CONFIG_PATH, 'a').close()
if verbose:
print('Created ~/.radis in {0}'.format(dirname(CONFIG_PATH)))
# Check database doesnt exist
if dbname in dbnames:
raise ValueError('Database already exists: {0}'.format(dbname) +
'. Cant add it')
# Add entries to parser
config = configparser.ConfigParser()
config[dbname] = {}
if 'info' in dict_entries: # optional
def getConfig():
    '''Read the hardcoded config file (`~/.radis`) and return the parser.

    Returns
    -------
    configparser.ConfigParser
        The parsed configuration.

    Raises
    ------
    FileNotFoundError
        If ``~/.radis`` does not exist yet; the message explains how to
        create it.
    '''
    parser = configparser.ConfigParser()
    # Test ~/.radis exists: fail early with setup instructions otherwise.
    if not exists(CONFIG_PATH):
        msg = ("Create a `.radis` file in {0} to store links to ".format(
                   dirname(CONFIG_PATH))
               + "your local databanks. Format must be:\n {0}".format(
                   DBFORMAT)
               + "\n(it can be empty too)")
        raise FileNotFoundError(msg)
    parser.read(CONFIG_PATH)
    return parser
#
# --- Fragment (review note): this chunk starts mid-expression and is a
# duplicate of the validation tail seen earlier in this file (the
# format/path checks and parameter-tuple return). Likely an artifact of
# how the file was assembled. Code left byte-identical; comments only.
levelsfmt, KNOWN_LVLFORMAT))
if parfuncfmt not in [None]+KNOWN_PARFUNCFORMAT:
raise ValueError('Partition function format ({0}) not in known list: {1}'.format(
parfuncfmt, KNOWN_PARFUNCFORMAT))
# Check all path exists
# ... remove empty paths first
path = [p for p in path if p != '']
# ... test paths
for p in path:
if not exists(p):
raise FileNotFoundError('databank lines file: `{0}`'.format(p))
if levels is not None:
for iso, lvl in levels.items(): # one file per isotope
if not exists(lvl):
raise FileNotFoundError('levels = `{0}`'.format(lvl))
if parfunc is not None: # all isotopes in same file?
if not exists(parfunc):
raise FileNotFoundError('parfunc = `{0}`'.format(parfunc))
# Get cache
if db_use_cached is None:
db_use_cached = self.params.db_use_cached
return (name, path, dbformat, parfunc, parfuncfmt, levels, levelsfmt,
db_use_cached, db_assumed_sorted, drop_columns, buffer, load_energies)
# --- Fragment (review note): second duplicated copy of the same
# validation tail (path/levels/parfunc existence checks and the
# parameter-tuple return), again starting mid-expression. Assembly
# artifact. Code left byte-identical; comments only.
parfuncfmt, KNOWN_PARFUNCFORMAT))
# Check all path exists
# ... remove empty paths first
path = [p for p in path if p != '']
# ... test paths
for p in path:
if not exists(p):
raise FileNotFoundError('databank lines file: `{0}`'.format(p))
if levels is not None:
for iso, lvl in levels.items(): # one file per isotope
if not exists(lvl):
raise FileNotFoundError('levels = `{0}`'.format(lvl))
if parfunc is not None: # all isotopes in same file?
if not exists(parfunc):
raise FileNotFoundError('parfunc = `{0}`'.format(parfunc))
# Get cache
if db_use_cached is None:
db_use_cached = self.params.db_use_cached
return (name, path, dbformat, parfunc, parfuncfmt, levels, levelsfmt,
db_use_cached, db_assumed_sorted, drop_columns, buffer, load_energies)
# --- Fragment (review note): interior of an unseen method that exports
# the database index to CSV (the enclosing `def` and `self` context are
# outside the visible region; indentation flattened). Code left
# byte-identical; comments only.
if file is None:
# Default index location: <db folder>/<db folder name>.csv
file = join(self.path, basename(self.path)+'.csv')
if len(self) > 0:
try:
# Presumably self.see() returns a pandas DataFrame view of the
# index — TODO confirm against the class definition.
self.see().to_csv(file)
except PermissionError:
# Best-effort: a locked/read-only index file only triggers a warning.
warn('Database index could not be updated: {0}'.format(
sys.exc_info()[1]))
else:
# Empty database: remove a stale index file if one is left over.
try:
os.remove(file) # if database existed but files were deleted
except PermissionError:
warn('Database index could not be updated: {0}'.format(
sys.exc_info()[1]))
except FileNotFoundError:
# No index file to remove — nothing to do.
pass