elif '_deleteItem' in request.POST:
    # get form
    formDelete = deleteAnomalyForm(request.POST)
    if formDelete.is_valid():
        ID_delete = str(formDelete.cleaned_data['ID'])
        # delete dataset; a parameterized query avoids SQL injection
        try:
            c.execute('DELETE FROM nirspecAnomaly WHERE id = ?', (ID_delete,))
        except Exception:
            print('Delete ID=' + ID_delete + ' failed')
        conn.commit()
        sql.close_connection(conn)
        # overwrite form with an empty form
        formAdd = addAnomalyForm_nir()
        formDelete = deleteAnomalyForm()
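# A minimal sketch of what deleteAnomalyForm might look like, assuming a
# plain Django form with a single integer ID field (hypothetical; the real
# form is defined elsewhere in the project):
from django import forms

class deleteAnomalyForm(forms.Form):
    """Form holding the primary key of the anomaly entry to delete."""
    ID = forms.IntegerField(label='ID', min_value=1)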
# get bokeh tabs
template = "nirspec_data_trending.html"
dash = nirspec_trending()
# define html context
context = {
    'dashboard': dash,
    'inst': '',  # Leave as empty string or instrument name; Required for navigation bar
    'inst_list': JWST_INSTRUMENT_NAMES_MIXEDCASE,  # Do not edit; Required for navigation bar
    'tools': MONITORS,  # Do not edit; Required for navigation bar
    'user': None,  # Do not edit; Required for authentication
}
# render the template; this closing step is assumed from the standard Django view pattern
return render(request, template, context)
Returns
-------
plot_data : list
    A list containing the JavaScript and HTML content for the dashboard
variables : dict
    Not used
"""
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
PACKAGE_DIR = __location__.split('instrument_monitors')[0]
# connect to database
# DATABASE_LOCATION = os.path.join(get_config()['jwql_dir'], 'database')
DATABASE_LOCATION = os.path.join(PACKAGE_DIR, 'database')
DATABASE_FILE = os.path.join(DATABASE_LOCATION, 'miri_database.db')
conn = sql.create_connection(DATABASE_FILE)
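# A minimal sketch of the sql helper used here, assuming it thinly wraps the
# standard sqlite3 module (hypothetical; the real module lives elsewhere):
import sqlite3

def create_connection(db_file):
    """Open (or create) the SQLite database and return a connection."""
    return sqlite3.connect(db_file)

def close_connection(conn):
    """Commit pending changes and close the connection."""
    conn.commit()
    conn.close()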
# some variables can be passed to the template via following
variables = dict(init=1)
# add tabs to dashboard
tab1 = power_plots(conn, start, end)
tab2 = volt_plots(conn, start, end)
tab3 = fpe_plots(conn, start, end)
tab4 = temperature_plots(conn, start, end)
tab5 = bias_plots(conn, start, end)
tab6 = wheel_ratios(conn, start, end)
# build dashboard
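# A plausible completion of the dashboard build, assuming each tab* above
# is a bokeh Panel (a sketch, not necessarily the verbatim project code):
from bokeh.embed import components
from bokeh.models.widgets import Tabs

tabs = Tabs(tabs=[tab1, tab2, tab3, tab4, tab5, tab6])
# components() returns the <script> and <div> snippets for the html template
script, div = components(tabs)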
# Wheel FW
for pos in mn.fw_positions:
    try:
        data = FW[pos]
        for element in data:
            sql.add_wheel_data(conn, 'IMIR_HK_FW_POS_RATIO_{}'.format(pos), element)
    except KeyError:
        pass
# Wheel GW
for pos in mn.gw_positions:
    try:
        data_GW14 = GW14[pos]
        data_GW23 = GW23[pos]
        for element in data_GW14:
            sql.add_wheel_data(conn, 'IMIR_HK_GW14_POS_RATIO_{}'.format(pos), element)
        for element in data_GW23:
            sql.add_wheel_data(conn, 'IMIR_HK_GW23_POS_RATIO_{}'.format(pos), element)
    except KeyError:
        pass
# Wheel CCC
for pos in mn.ccc_positions:
    try:
        data = CCC[pos]
        for element in data:
            sql.add_wheel_data(conn, 'IMIR_HK_CCC_POS_RATIO_{}'.format(pos), element)
    except KeyError:
        pass
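# A minimal sketch of what sql.add_wheel_data might do, assuming each wheel
# table stores (timestamp, ratio) rows (hypothetical schema):
def add_wheel_data(conn, table, element):
    """Insert one (timestamp, ratio) tuple into the given wheel table."""
    c = conn.cursor()
    # the table name cannot go through a placeholder, so it is formatted in;
    # the values themselves use parameterized '?' markers
    c.execute('INSERT INTO {} VALUES (?, ?)'.format(table), element)
    conn.commit()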
def main():
    # generate paths
    DATABASE_LOCATION = os.path.join(get_config()['jwql_dir'], 'database')
    DATABASE_FILE = os.path.join(DATABASE_LOCATION, 'miri_database.db')
    # connect to temporary database
    conn = sql.create_connection(DATABASE_FILE)
    # process every csv file in the directory
    # ('paths' is assumed to be populated elsewhere, e.g. via glob.glob())
    for path in paths:
        process_file(conn, path)
    # close connection
    sql.close_connection(conn)
    print("done")
def main():
    db_file = "miri_database.db"
    conn = sql.create_connection(db_file)
    plot_volt4(conn, "output/volt4.html")
    sql.close_connection(conn)
    print('end')
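# A rough sketch of what plot_volt4 might look like, assuming it reads the
# stored statistics and renders a bokeh line plot to the given html file
# (the table and column names here are hypothetical):
from bokeh.plotting import figure, output_file, save

def plot_volt4(conn, out_file):
    """Plot the IMIR_HK_ICE_SEC_VOLT4 averages over time."""
    c = conn.cursor()
    c.execute('SELECT start_time, average FROM IMIR_HK_ICE_SEC_VOLT4')
    rows = c.fetchall()
    p = figure(title='IMIR_HK_ICE_SEC_VOLT4', x_axis_label='time',
               y_axis_label='voltage (V)')
    p.line([r[0] for r in rows], [r[1] for r in rows])
    output_file(out_file)
    save(p)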
# process raw data with once-a-day routine
processed_data = once_a_day_routine(m_raw_data)
# push extracted and filtered data to temporary database
for key, value in processed_data.items():
    # abbreviate data table
    m = m_raw_data.mnemonic(key)
    length = len(value)
    mean = statistics.mean(value)
    deviation = statistics.stdev(value)
    dataset = (float(m.meta['start']), float(m.meta['end']), length, mean, deviation)
    # the two idle-mode mnemonics are stored under dedicated *_IDLE tables
    if key == "SE_ZIMIRICEA":
        sql.add_data(conn, "SE_ZIMIRICEA_IDLE", dataset)
    elif key == "IMIR_HK_ICE_SEC_VOLT4":
        sql.add_data(conn, "IMIR_HK_ICE_SEC_VOLT4_IDLE", dataset)
    else:
        sql.add_data(conn, key, dataset)
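# Worked example of the statistics stored above (values are illustrative):
# value = [1.0, 1.2, 0.8] yields length 3, mean 1.0, and a sample standard
# deviation of 0.2, so the stored tuple is (start, end, 3, 1.0, 0.2).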