# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def start_flickr_crawler(path: Path, search_text: str, num_images: int, apikey: str):
    """Kick off a Flickr image download into *path*. Requires an API key.

    Args:
        path: Root directory where downloaded images are stored.
        search_text: Tag string passed to the Flickr tag search.
        num_images: Maximum number of images to download.
        apikey: Flickr API key; see
            https://www.flickr.com/services/api/misc.api_keys.html

    Raises:
        ValueError: If *apikey* is None.
    """
    # Explicit exception instead of `assert`: asserts are stripped when
    # Python runs with the -O flag, which would silently skip validation.
    if apikey is None:
        raise ValueError(
            "Flickr requires an apikey: "
            "'https://www.flickr.com/services/api/misc.api_keys.html'")
    crawler = FlickrImageCrawler(
        apikey,
        feeder_threads=2,
        parser_threads=2,
        downloader_threads=8,
        storage={'root_dir': path})
    # tag_mode='all' means an image must match every tag in search_text.
    crawler.crawl(tags=search_text, max_num=num_images, tag_mode='all')
# Top-level script: crawl up to 10000 images matching the search term
# argv[2] from several engines, each into its own subdirectory of argv[1].
os.makedirs(argv[1], exist_ok=True)  # don't crash if the target dir already exists

crawler = GoogleImageCrawler(storage={'root_dir': f'{argv[1]}/google'})
crawler.crawl(keyword=argv[2], max_num=10000, min_size=(200, 200), max_size=None)

bing_crawler = BingImageCrawler(storage={'root_dir': f'{argv[1]}/bing'})
bing_crawler.crawl(keyword=argv[2], max_num=10000, min_size=(200, 200), max_size=None)

baidu_crawler = BaiduImageCrawler(storage={'root_dir': f'{argv[1]}/baidu'})
baidu_crawler.crawl(keyword=argv[2], max_num=10000, min_size=(200, 200), max_size=None)

# NOTE(review): this Flickr call looks broken — FlickrImageCrawler requires
# an API key as its first argument, and its crawl() takes `tags=`, not
# `keyword=` (compare start_flickr_crawler above). As written it will fail
# at runtime; route it through start_flickr_crawler with a real key instead.
flickr_crawler = FlickrImageCrawler(storage={'root_dir': f'{argv[1]}/flickr'})
flickr_crawler.crawl(keyword=argv[2], max_num=10000, min_size=(200, 200), max_size=None)