How to use comiccrawler - common examples

To help you get started, we’ve selected a few comiccrawler examples drawn from the eight04/ComicCrawler codebase, showing common ways its site modules, crawler core, and GUI are used.


github eight04 / ComicCrawler / comiccrawler / mods / xznj120.py
def get_images(html, url):
	# assumed pattern: capture the inline <script> block that defines
	# qTcms_S_m_murl_e (the exact regex in the original module may differ)
	script = re.search(r'<script[^>]*>([\s\S]*?qTcms_S_m_murl_e[\s\S]*?)</script>', html).group(1)
	show_js_src = re.search(r'src="([^"]+?show\.\d+\.js[^"]*)', html).group(1)
	show_js = grabhtml(urljoin(url, show_js_src))
	real_pic_fn = re.search(r'(function f_qTcms_Pic_curUrl_realpic[\s\S]+?)function', show_js).group(1)
	code = """
	{script}
	{real_pic_fn}
	function base64_decode(data) {{
		return Buffer.from(data, "base64").toString();
	}}
	// m.wuyouhui.net/template/wap1/css/d7s/js/show.20170501.js?20190506201115
	Buffer.from(qTcms_S_m_murl_e, "base64")
		.toString()
		.split("$qingtiandy$")
		.filter(u => !/^(--|\+)/.test(u))
		.map(f_qTcms_Pic_curUrl_realpic);
	""".format(script=script, real_pic_fn=real_pic_fn)
	# "eval" here is expected to run the assembled JavaScript (e.g. in a
	# Node-based sandbox), not Python's builtin eval
	return [urljoin(url, i) for i in eval(code)]
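
The module above rebuilds the site's own JavaScript and evaluates it to recover the image list: the page defines a base64 string, qTcms_S_m_murl_e, whose decoded value is a list of image URLs joined by the marker $qingtiandy$, and f_qTcms_Pic_curUrl_realpic rewrites each entry into a real picture URL. The decoding half of that is easy to express in plain Python. The sketch below only illustrates that step: decode_image_list and the sample value are made up here, and it skips the site-specific f_qTcms_Pic_curUrl_realpic rewrite that the real module still needs the JavaScript for.

import base64
from urllib.parse import urljoin

def decode_image_list(qtcms_murl_e, page_url):
    # decode the base64, "$qingtiandy$"-joined image list used by qTcms pages
    raw = base64.b64decode(qtcms_murl_e).decode("utf-8")
    urls = raw.split("$qingtiandy$")
    # drop placeholder entries, mirroring the filter(u => !/^(--|\+)/.test(u)) above
    urls = [u for u in urls if u and not u.startswith(("--", "+"))]
    return [urljoin(page_url, u) for u in urls]

# hypothetical sample: two relative paths joined by the marker, then base64-encoded
sample = base64.b64encode(b"/img/001.jpg$qingtiandy$/img/002.jpg").decode()
print(decode_image_list(sample, "https://example.com/comic/1.html"))
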
github eight04 / ComicCrawler / comiccrawler / gui / select_episodes.py
def handle_click(event):
			if event.state & 0x0001 and self.anchor_index is not None: # shift
				start = min(self.anchor_index, index)
				end = max(self.anchor_index, index)
				for i in range(start, end + 1):
					if i in (index, self.anchor_index):
						continue
					check = self.checks[i]
					if check.instate(("selected", )):
						check.state(("!selected", ))
					else:
						check.state(("selected", ))
			else:
				self.anchor_index = index
				
		check = ttk.Checkbutton(self.window, text=safe_tk(ep.title))
		# binds the click handler; "<Button-1>" is an assumed event sequence
		check.bind("<Button-1>", handle_click)
		check.state(("!alternate",))
		if not ep.skip:
			check.state(("selected",))
		check.grid(
			column=(index // 20) - self.window_column,
			row=index % 20,
			sticky="w"
		)
		self.checks.append(check)
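
This dialog builds the episode list out of ttk.Checkbutton widgets: clearing the "alternate" state makes each box start as a plain unchecked button instead of the indeterminate third state, episodes that are not skipped are pre-selected, and shift-clicking toggles every checkbox between the remembered anchor_index and the clicked row. The standalone sketch below shows just the state-toggling idiom with instate/state; the window, labels, and toggle_range helper are invented for illustration and are not part of ComicCrawler.

import tkinter as tk
from tkinter import ttk

root = tk.Tk()
checks = []
for i in range(5):
    check = ttk.Checkbutton(root, text="Episode {}".format(i + 1))
    check.state(("!alternate",))  # start as a normal, unchecked box
    check.grid(column=0, row=i, sticky="w")
    checks.append(check)

def toggle_range(start, end):
    # flip the "selected" state of every checkbutton in [start, end]
    for check in checks[start:end + 1]:
        if check.instate(("selected",)):
            check.state(("!selected",))
        else:
            check.state(("selected",))

toggle_range(1, 3)  # e.g. what a shift-click spanning rows 1..3 would do
root.mainloop()
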
github eight04 / ComicCrawler / comiccrawler / crawler.py
def download():
		if not crawler.is_init:
			debug_log("D_INIT")
			crawler.init()
			
		if not crawler.html:
			debug_log("D_INIT_IMAGE")
			crawler.init_images()
	
		if not crawler.image:
			debug_log("D_NEXT_PAGE")
			crawler.next_page()
			return
			
		if crawler.page_exists():
			debug_log("D_NEXT_IMAGE")
			print("page {} already exist".format(crawler.ep.total + 1))
			crawler.next_image()
			return
			
		debug_log("D_RESOLVE")
		crawler.resolve_image()
		print("Downloading {} page {}: {}\n".format(
			crawler.ep.title, crawler.ep.total + 1, crawler.image.url))
		debug_log("D_DOWNLOAD")
		crawler.download_image()
		debug_log("D_HANDLE")
github eight04 / ComicCrawler / comiccrawler / gui.py
def beforequit():
			if download_manager.is_downloading():
				if not messagebox.askokcancel(
						"Comic Crawler",
						"任務下載中,確定結束?"):
					return
					
			# going to quit
			printer.remove_listener(self.sp_callback)		
		
			self.root.destroy()
			
			download_manager.stop_download()
			download_manager.stop_analyze()
			download_manager.stop_check_update()
			
			mission_manager.save()
			
			config.save()
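
beforequit is the quit confirmation hook: while a download is running it asks the user with messagebox.askokcancel before tearing the window down, stopping the workers, and saving missions and config. The sketch below shows one minimal way to wire such a handler to the window's close button via the WM_DELETE_WINDOW protocol; DummyManager is a made-up stand-in, and the real ComicCrawler GUI may attach its handler differently.

import tkinter as tk
from tkinter import messagebox

class DummyManager:
    # hypothetical stand-in for ComicCrawler's download manager
    def is_downloading(self):
        return True

download_manager = DummyManager()
root = tk.Tk()

def beforequit():
    if download_manager.is_downloading():
        if not messagebox.askokcancel(
                "Comic Crawler",
                "A download is in progress. Quit anyway?"):
            return
    # place to stop workers and save state before the window goes away
    root.destroy()

# run beforequit when the user clicks the window's close button
root.protocol("WM_DELETE_WINDOW", beforequit)
root.mainloop()
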
github eight04 / ComicCrawler / comiccrawler / core / __init__.py
def handle_error(self, error):
		"""Send error to error handler."""
		handler = getattr(self.mod, "errorhandler", None)
		if not handler:
			return

		try:
			handler(error, self)

		except Exception as err: # pylint: disable=broad-except
			print("[Crawler] Failed to handle error: {}".format(err))