Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
def start(self, ready=None):
    """
    Start the background thread that executes actions for matched pastes.

    :param ready: optional threading.Event set once the actionhandler has been started
    :return: None
    """
    with self.__lock:
        if not self.running:
            self.running = True
            worker = start_thread(self._start, "ActionHandler", self.__exception_event)
            self.__threads.append(worker)
            # Let waiting callers know the handler is now up
            if ready is not None:
                ready.set()
def start(self, workers=4, ready=None):
    """
    Start dispatching downloaded pastes to the registered analyzers.

    :param workers: intended size of the analyzer thread pool - currently unused (pool not implemented, see TODO)
    :param ready: optional threading.Event set once dispatching has started
    :return: the action queue fed by the dispatcher, or None if no analyzers are registered
    """
    with self.__lock:
        if not self.running:
            # Refuse to start without analyzers - no paste could ever be matched
            if not self.analyzers:
                self.logger.warning("No analyzers added! At least one analyzer must be added prior to use!")
                return None
            self.running = True
            dispatcher_thread = start_thread(self._start_analyzing, "PasteDispatcher", exception_event=self.__exception_event)
            self.__threads.append(dispatcher_thread)
            # TODO: start a pool of 'workers' analyzer threads to bound parallel analysis
            if ready is not None:
                ready.set()
    # Return the queue so the main thread can consume resulting actions
    return self.action_queue
def start(self):
    """
    Start all registered scrapers, each in its own thread.

    :return: the paste queue the scrapers feed, or None if no scrapers are registered
    """
    with self.__lock:
        if not self.running:
            # There needs to be at least one scraper, otherwise nothing would be downloaded
            if not self.scrapers:
                self.logger.warning("No scrapers added! At least one scraper must be added prior to use!")
                return None
            self.running = True
            # Launch one thread per scraper; all feed the shared paste queue
            for scraper in self.scrapers:
                thread = start_thread(scraper.start, scraper.name, paste_queue=self.paste_queue, exception_event=self.__exception_event)
                self.__threads.append(thread)
    # Return the queue so the main thread can insert updates
    return self.paste_queue
def _start_analyzing(self):
while self.running:
try:
# Get paste from queue
paste = self.paste_queue.get(True, 1)
# TODO implement thread pool to limit number of parallel executed threads
# Don't add these threads to the list. Otherwise they will just block the list
start_thread(self._process_paste, "process_paste", paste=paste, exception_event=self.__exception_event)
except Empty:
if self.__stop_event.is_set():
self.logger.debug("orderly stopping")
self.running = False
break
elif self.__exception_event.is_set():
self.logger.critical("stopping due to exception in another thread")
self.running = False
break
continue
def start(self, paste_queue):
    """
    Start the scraping loop: poll the source for recent pastes and queue unknown ones.

    :param paste_queue: queue the downloaded paste bodies are ultimately pushed onto
    :return: None (loops until self.running is cleared)
    """
    self.paste_queue = paste_queue
    self.running = True
    # Background thread that downloads the actual paste bodies
    start_thread(self._body_downloader, "BodyDownloader", self._exception_event)
    while self.running:
        self._last_scrape_time = int(time.time())
        pastes = self._get_recent(limit=100)
        # NOTE(review): 'counter' appears unused in this excerpt - presumably incremented
        # further down in the original loop body; confirm before removing.
        counter = 0
        if pastes is not None:
            for paste in pastes:
                # Skip pastes we have already seen
                # NOTE(review): _known_pastes looks like a list, so this membership
                # test is O(n) per paste - consider a set if the list only grows.
                if paste.key in self._known_pastes:
                    continue
                self.logger.debug("Paste is unknown - adding it to list {}".format(paste.key))
                self._tmp_paste_queue.put(paste)
                self._known_pastes.append(paste.key)
def start(self):
    """Launch the API server in a background thread; no-op if already running."""
    with self._server_lock:
        if not self.is_running:
            # NOTE(review): a fresh Event() is passed as the exception event, so
            # exceptions raised in the server thread are never observed here - confirm intended.
            self._api_thread = start_thread(self._start_server, "sanicAPI", Event())
            self.is_running = True