Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# delete field if no badges?
try:
if remove_val:
response = ff_utils.patch_metadata({"badges": remove_val}, remove_key, key=ff_keys)
else:
response = ff_utils.patch_metadata({}, remove_key + '?delete_fields=badges', key=ff_keys)
if response['status'] == 'success':
patches['remove_badge_success'].append(remove_key)
else:
patches['remove_badge_failure'].append(remove_key)
except Exception:
patches['remove_badge_failure'].append(remove_key)
if len(output_keys) > 2:
for edit_key, edit_val in full_output[output_keys[2]].items():
try:
response = ff_utils.patch_metadata({"badges": edit_val}, edit_key, key=ff_keys)
if response['status'] == 'success':
patches['edit_badge_success'].append(edit_key)
else:
patches['edit_badge_failure'].append(edit_key)
except Exception:
patches['edit_badge_failure'].append(edit_key)
return patches
expset_patch = True
matching_title_filegroups[0]["higlass_view_config"] = higlass_item_results["item_uuid"]
new_viewconfs[title] = higlass_item_results["item_uuid"]
number_of_posted_viewconfs += 1
if expset_patch:
# The other_processed_files section has been updated. Patch the changes.
try:
# Make sure all higlass_view_config fields just show the uuid.
for g in [ group for group in expsets_to_update[accession]["other_processed_files"] if "higlass_view_config" in group ]:
if isinstance(g["higlass_view_config"], dict):
uuid = g["higlass_view_config"]["uuid"]
g["higlass_view_config"] = uuid
ff_utils.patch_metadata(
{'other_processed_files': expsets_to_update[accession]["other_processed_files"]},
obj_id=accession,
key=connection.ff_keys
)
number_of_viewconfs_updated += number_of_posted_viewconfs
except Exception as e:
if accession not in action_logs['failed_to_patch_expset']:
action_logs['failed_to_patch_expset'][accession] = {}
if title not in action_logs['failed_to_patch_expset'][accession]:
action_logs['failed_to_patch_expset'][accession][title] = {}
action_logs['failed_to_patch_expset'][accession][title] = str(e)
continue
else:
number_of_viewconfs_updated += number_of_posted_viewconfs
# Success. Note which titles link to which HiGlass view configs.
if common:
log.append('some files ({}) are already in other_processed_files filed for {}'.format(common, acc))
continue
source_status = resp['status']
# if move_to_pc is set to true, but the source status is released/to project
# set it back to finalize_user_pending_labs
if source_status in ['released', 'released to project']:
move_to_pc = False
# if move_to_pc is true, add them to processed_files
if move_to_pc:
# at this step we expect processed_files field to be empty
if ex_pc:
log.append('expected processed_files to be empty: {}'.format(acc))
continue
# patch the processed files field
ff_utils.patch_metadata({'processed_files': list_pc}, obj_id=acc, key=auth)
# if not move_to_pc, add files to opf with proper title
else:
# we need raw to get the existing piece, to patch back with the new ones
if ex_opc:
patch_val = ff_utils.get_metadata(acc, key=auth, add_on='frame=raw').get('other_processed_files', [])
else:
patch_val = []
new_data = {'title': pc_set_title,
'type': 'preliminary',
'files': list_pc}
patch_val.append(new_data)
patch_body = {'other_processed_files': patch_val}
ff_utils.patch_metadata(patch_body, obj_id=acc, key=auth)
# add the tag
set_acc = patch_data['add_tag'][0]
continue
try:
response = ff_utils.patch_metadata({"badges": badges}, add_key[1:], key=ff_keys)
if response['status'] == 'success':
patches['add_badge_success'].append(add_key)
else:
patches['add_badge_failure'].append(add_key)
except Exception:
patches['add_badge_failure'].append(add_key)
for remove_key, remove_val in full_output[output_keys[1]].items():
# delete field if no badges?
try:
if remove_val:
response = ff_utils.patch_metadata({"badges": remove_val}, remove_key, key=ff_keys)
else:
response = ff_utils.patch_metadata({}, remove_key + '?delete_fields=badges', key=ff_keys)
if response['status'] == 'success':
patches['remove_badge_success'].append(remove_key)
else:
patches['remove_badge_failure'].append(remove_key)
except Exception:
patches['remove_badge_failure'].append(remove_key)
if len(output_keys) > 2:
for edit_key, edit_val in full_output[output_keys[2]].items():
try:
response = ff_utils.patch_metadata({"badges": edit_val}, edit_key, key=ff_keys)
if response['status'] == 'success':
patches['edit_badge_success'].append(edit_key)
else:
patches['edit_badge_failure'].append(edit_key)
except Exception:
patches['edit_badge_failure'].append(edit_key)
patched_static_content = static_content_section
# Look through the static content to see if this section exists already.
reuse_existing = False
for sc in patched_static_content:
if sc["description"] == "auto_generated_higlass_view_config":
sc.update(new_sc_section)
reuse_existing = True
break
# If there is no existing Higlass static content, add the new content to the existing static_content
if not reuse_existing:
patched_static_content = static_content_section + [new_sc_section]
try:
ff_utils.patch_metadata(
{'static_content': patched_static_content},
obj_id=item_uuid,
key=connection.ff_keys
)
except Exception as e:
return False, str(e)
return True, ""
def patch_workflow_run_to_deleted(connection, **kwargs):
    """Patch every item attached to a problematic workflow run to status 'deleted'.

    Walks the associated check result's ``full_output['problematic_wfrs']``
    entries — each a sequence whose second element is the workflow-run uuid
    and whose third is the list of item ids to delete — and patches each
    listed item. Per-item successes and failures are accumulated in the
    action output; the action status ends up 'FAIL' if any patch raised,
    otherwise 'DONE'.
    """
    action = ActionResult(connection, 'patch_workflow_run_to_deleted')
    associated_check = action.get_associated_check_result(kwargs)
    logs = {'patch_failure': [], 'patch_success': []}
    auth_key = connection.ff_keys
    for case in associated_check['full_output']['problematic_wfrs']:
        wfr_uuid = case[1]
        items_to_delete = case[2]
        deletion_patch = {'status': 'deleted'}
        for item_id in items_to_delete:
            try:
                ff_utils.patch_metadata(deletion_patch, obj_id=item_id, key=auth_key)
            except Exception as err:
                # Best-effort: record the failure and keep patching the rest.
                logs['patch_failure'].append([item_id, str(err)])
            else:
                logs['patch_success'].append(wfr_uuid + " - " + item_id)
    action.output = logs
    # Any single failed patch downgrades the whole action to FAIL.
    action.status = 'FAIL' if logs.get('patch_failure') else 'DONE'
    return action
"numberType": "percent"})
qc_summary.append({"title": "Nonredundant Read Fraction (NRF)",
"value": str(round2(quality_metric[pref + "pbc_qc"][0]["NRF"])),
"tooltip": "distinct non-mito read pairs / total non-mito read pairs",
"numberType": "float"})
qc_summary.append({"title": "PCR Bottleneck Coefficient (PBC)",
"value": str(round2(quality_metric[pref + "pbc_qc"][0]["PBC1"])),
"tooltip": "one-read non-mito read pairs / distinct non-mito read pairs",
"numberType": "float"})
final_reads = quality_metric[pref + "nodup_flagstat_qc"][0]["read1"] # PE
if not final_reads:
final_reads = quality_metric[pref + "nodup_flagstat_qc"][0]["total"] # SE
qc_summary.append({"title": "Filtered & Deduped Reads",
"value": str(final_reads),
"numberType": "integer"})
ff_utils.patch_metadata({'quality_metric_summary': qc_summary}, file_uuid, key=key)
return qc_summary
total_patches = headers_check_result['full_output']['to_add']
total_patches.update(headers_check_result['full_output']['to_remove'])
for item, headers in total_patches.items():
# if all headers are deleted, use ff_utils.delete_field
if headers == []:
try:
ff_utils.delete_field(item, 'static_headers', key=connection.ff_keys)
except Exception as e:
patch_error = '\n'.join([item, str(e)])
action_logs['patch_failure'].append(patch_error)
else:
action_logs['patch_success'].append(item)
else:
patch_data = {'static_headers': headers}
try:
ff_utils.patch_metadata(patch_data, obj_id=item, key=connection.ff_keys)
except Exception as e:
patch_error = '\n'.join([item, str(e)])
action_logs['patch_failure'].append(patch_error)
else:
action_logs['patch_success'].append(item)
action.status = 'DONE'
action.output = action_logs
qc_summary.append({"title": "Filtered Reads",
"value": str(quality_metric["Total reads"]),
"numberType": "integer"})
qc_summary.append({"title": "Cis reads (>20kb)",
"value": str(percent(quality_metric["Cis reads (>20kb)"])),
"tooltip": tooltip(quality_metric["Cis reads (>20kb)"]),
"numberType": "percent"})
qc_summary.append({"title": "Short cis reads",
"value": str(percent(quality_metric["Short cis reads (<20kb)"])),
"tooltip": tooltip(quality_metric["Short cis reads (<20kb)"]),
"numberType": "percent"})
qc_summary.append({"title": "Trans Reads",
"value": str(percent(quality_metric["Trans reads"])),
"tooltip": tooltip(quality_metric["Trans reads"]),
"numberType": "percent"})
res = ff_utils.patch_metadata({'quality_metric_summary': qc_summary}, file_uuid, key=key)
return res