Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
# NOTE(review): fragment of a Flask request handler — the enclosing `def` (and
# its route decorator) is above this view. `request`, `user_mc`, `label`,
# `format_name_from_label`, `MediaTag`, and the TAG_* constants are defined
# elsewhere in the file. Indentation below is reconstructed from syntax.
description = request.form['description']
# Optional checkbox-style fields: present with the string 'true' when checked,
# absent otherwise, so missing keys are normalized to None here.
static = request.form['static'] if 'static' in request.form else None
show_on_stories = request.form['showOnStories'] if 'showOnStories' in request.form else None
show_on_media = request.form['showOnMedia'] if 'showOnMedia' in request.form else None
source_ids = []
# 'sources[]' is a comma-separated list of media ids; empty string means
# "create the collection with no sources attached yet".
if len(request.form['sources[]']) > 0:
    source_ids = request.form['sources[]'].split(',')
formatted_name = format_name_from_label(label)
# first create the collection (a tag in the collections tag set); the string
# comparisons turn the 'true'/None form values into real booleans
new_collection = user_mc.createTag(TAG_SETS_ID_COLLECTIONS, formatted_name, label, description,
                                   is_static=(static == 'true'),
                                   show_on_stories=(show_on_stories == 'true'),
                                   show_on_media=(show_on_media == 'true'))
# then go through and tag all the sources specified with the new collection id
tags = [MediaTag(sid, tags_id=new_collection['tag']['tags_id'], action=TAG_ACTION_ADD) for sid in source_ids]
if len(tags) > 0:
    user_mc.tagMedia(tags)
# respond with the newly created tag (the collection) as JSON
return jsonify(new_collection['tag'])
# NOTE(review): fragment of a larger function — `m`, `mid`, `source_list`, and
# the `tags` accumulator it appends to are all defined above this view.
# `m` is presumably a one-entry mapping of {metadata-column-name: tag-set-id};
# TODO confirm against the caller. Indentation reconstructed from syntax.
mkey = list(m.keys())[0]
# all known tags in this metadata tag set, fetched with the tool-level API key
tag_codes = tags_in_tag_set(TOOL_API_KEY, mid)
for source in source_list:
    if mkey in source:
        metadata_tag_name = source[mkey]
        if metadata_tag_name not in ['', None]:
            # hack until we have a better match check
            if mkey == METADATA_PUB_COUNTRY_NAME:  # pub-country tags follow a 'pub_###' naming template
                matching = [t for t in tag_codes if t['tag'] == 'pub_' + metadata_tag_name]
            else:
                matching = [t for t in tag_codes if t['tag'] == metadata_tag_name]
            # NOTE(review): `matching` is a list, so `matching not in ['', None]`
            # is always True; the truthiness check alone decides this branch.
            if matching and matching not in ['', None]:
                metadata_tag_id = matching[0]['tags_id']
                logger.debug('found metadata to add %s', metadata_tag_id)
                tags.append(MediaTag(source['media_id'], tags_id=metadata_tag_id, action=TAG_ACTION_ADD))
# now do all the tags in parallel batches so it happens quickly
if len(tags) > 0:
    chunks = [tags[x:x + 50] for x in range(0, len(tags), 50)]  # do 50 tags in each request
    # NOTE(review): parallel path is hard-disabled here; when re-enabling,
    # prefer pool.close() + pool.join() over bare terminate() after map().
    use_pool = False
    if use_pool:
        pool = Pool(processes=MEDIA_METADATA_UPDATE_POOL_SIZE)  # process updates in parallel with worker function
        pool.map(_tag_media_worker, chunks)  # blocks until they are all done
        pool.terminate()  # extra safe garbage collection
    else:
        # sequential fallback: run each 50-tag batch through the same worker
        [_tag_media_worker(job) for job in chunks]
# NOTE(review): fragment of a Flask source-creation handler — `name`, `url`,
# `editor_notes`, `public_notes`, `monitored`, `tag_ids_to_add`, and
# `valid_metadata` are parsed from the request above this view.
# Indentation reconstructed from syntax.
source_to_create = {
    'name': name,
    'url': url,
    'editor_notes': editor_notes,
    'public_notes': public_notes,
    'is_monitored': monitored,
    'tags_ids': tag_ids_to_add
}
result = user_mc.mediaCreate([source_to_create])[0]  # need just the first entry, since we only create one
if result['status'] != "error":
    # if it worked, update any metadata, because we need to remove the other tags in each set
    for metadata_item in valid_metadata:
        # each metadata value is an optional form field holding a tags_id
        metadata_tag_id = request.form[metadata_item['form_key']] if metadata_item['form_key'] in request.form else None  # this is optional
        if metadata_tag_id:
            user_mc.tagMedia(
                tags=[MediaTag(result['media_id'], tags_id=metadata_tag_id, action=TAG_ACTION_ADD)],
                clear_others=True)  # make sure to clear any other values set in this metadata tag set
            tag_ids_to_add.append(metadata_tag_id)
    if result['status'] == 'new':
        # if it is a really new source, kick off a scraping job to find any RSS feeds
        user_mc.feedsScrape(result['media_id'])
# echo back the create result (includes 'status' and 'media_id') as JSON
return jsonify(result)
]
# NOTE(review): fragment of a source-update handler — `valid_metadata`,
# `source`, `media_id`, and the `result` returned at the bottom come from
# code above this view. Indentation reconstructed from syntax.
for metadata_item in valid_metadata:
    # optional form field holding the tags_id chosen for this metadata tag set
    metadata_tag_id = request.form[metadata_item['form_key']] if metadata_item['form_key'] in request.form else None  # this is optional
    # NOTE(review): despite the name, this is a list of tag *dicts* (the
    # source's current metadata tags), not a list of ids.
    existing_tag_ids = [t for t in source['media_source_tags'] if is_metadata_tag_set(t['tag_sets_id'])]
    # form field check: the JS client may send 'null'/'undefined' as literal strings
    if metadata_tag_id in [None, '', 'null', 'undefined']:
        # we want to remove it if there was one there
        if len(existing_tag_ids) > 0:
            for remove_if_empty in existing_tag_ids:
                if metadata_item['tag_sets_id'] == remove_if_empty['tag_sets_id']:
                    tag = MediaTag(media_id, tags_id=remove_if_empty['tags_id'], action=TAG_ACTION_REMOVE)
                    user_mc.tagMedia([tag])
    # NOTE(review): this compares a form string against a list of tag dicts,
    # so it is effectively always True — the tag is (re)applied every time.
    # Harmless because clear_others=True makes the call idempotent, but the
    # intended check was probably against [t['tags_id'] for t in ...]; confirm.
    elif metadata_tag_id not in existing_tag_ids:
        # need to add it and clear out the other values in this tag set
        tag = MediaTag(media_id, tags_id=metadata_tag_id, action=TAG_ACTION_ADD)
        user_mc.tagMedia([tag], clear_others=True)
# result the success of the media update call - would be better to catch errors in any of these calls...
return jsonify(result)