    def tearDown(self):
        # remove the per-test scratch directory, prune its empty parent,
        # and restore the logger silenced in setUp
        shutil.rmtree(workdir)
        os.removedirs(os.path.split(workdir)[0])
        planet.logger = self.original_logger
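# A minimal, self-contained sketch of the scratch-directory lifecycle the
# setUp/tearDown pair in these tests implements (the `workdir` path and the
# class name here are illustrative, not part of Venus): create a nested work
# directory per test, then remove the leaf and prune its empty parents.
import os, shutil, unittest

workdir = os.path.join('tests', 'work', 'filter')

class WorkdirExample(unittest.TestCase):
    def setUp(self):
        # recreate the directory even if a previous run left it behind
        if os.path.exists(workdir): shutil.rmtree(workdir)
        os.makedirs(workdir)

    def tearDown(self):
        shutil.rmtree(workdir)                    # remove the leaf
        os.removedirs(os.path.split(workdir)[0])  # prune empty parents

    def test_workdir_exists(self):
        self.assertTrue(os.path.isdir(workdir))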
try:
    import libxslt
except:
    try:
        try:
            # Python 2.5 bug 1704790 workaround (alas, Unix only)
            import commands
            if commands.getstatusoutput('xsltproc --version')[0] != 0:
                raise ImportError
        except:
            from subprocess import Popen, PIPE
            xsltproc = Popen(['xsltproc','--version'], stdout=PIPE, stderr=PIPE)
            xsltproc.communicate()
            if xsltproc.returncode != 0: raise ImportError
    except:
        logger.warn("libxslt is not available => can't test xslt filters")
        del XsltFilterTests.test_xslt_filter
        del XsltFilterTests.test_addsearch_filter
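# A standalone sketch of the probe used above: decide whether an external
# command is on the PATH by running it with --version and checking the exit
# status. The helper name `have_command` is illustrative.
from subprocess import Popen, PIPE

def have_command(argv):
    try:
        proc = Popen(argv, stdout=PIPE, stderr=PIPE)
        proc.communicate()            # drain output and wait for exit
        return proc.returncode == 0
    except OSError:                   # executable not found
        return False

have_xsltproc = have_command(['xsltproc', '--version'])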
    def setUp(self):
        # silence errors, remembering the logger so tearDown can restore it
        self.original_logger = planet.logger
        planet.getLogger('CRITICAL', None)
        try:
            os.makedirs(workdir)
        except:
            self.tearDown()
            os.makedirs(workdir)

# module-level feature detection; the enclosing try supplies the Popen
# import that the probes below depend on
try:
    from subprocess import Popen, PIPE

    _no_sed = True   # assume sed is missing until the probe succeeds
    if _no_sed:
        try:
            sed = Popen(['sed','--version'], stdout=PIPE, stderr=PIPE)
            sed.communicate()
            if sed.returncode == 0: _no_sed = False
        except WindowsError:
            pass

    if _no_sed:
        logger.warn("sed is not available => can't test stripAd_yahoo")
        del FilterTests.test_stripAd_yahoo

    try:
        import libxml2
    except:
        logger.warn("libxml2 is not available => can't test xpath_sifter")
        del FilterTests.test_xpath_filter1
        del FilterTests.test_xpath_filter2

except ImportError:
    logger.warn("Popen is not available => can't test standard filters")
    for method in dir(FilterTests):
        if method.startswith('test_'): delattr(FilterTests, method)
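# A minimal sketch of the skip-by-deletion idiom above: deleting a test_*
# attribute from a TestCase subclass before unittest collects the suite
# means that test is never run. Class and method names are illustrative.
import unittest

class ExampleTests(unittest.TestCase):
    def test_always(self):
        self.assertTrue(True)
    def test_needs_libxml2(self):
        import libxml2
        self.assertTrue(libxml2)

try:
    import libxml2
except ImportError:
    # drop the dependent test so the rest of the suite still passes
    del ExampleTests.test_needs_libxml2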
def writeCache(feed_uri, feed_info, data):
    log = planet.logger
    sources = config.cache_sources_directory()
    blacklist = config.cache_blacklist_directory()

    # capture http status
    if not data.has_key("status"):
        if data.has_key("entries") and len(data.entries) > 0:
            data.status = 200
        elif data.bozo and \
            data.bozo_exception.__class__.__name__.lower() == 'timeout':
            data.status = 408
        else:
            data.status = 500

    activity_horizon = \
        time.gmtime(time.time() - 86400 * config.activity_threshold(feed_uri))
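# A standalone sketch of the status-capture rule above, using a plain dict
# in place of a feedparser result (the fields `entries`, `bozo` and
# `bozo_exception` mimic feedparser's): entries present means 200, a parse
# failure caused by a timeout means 408, anything else means 500.
def capture_status(data):
    if data.has_key('status'):
        return data['status']
    if data.get('entries'):
        return 200
    exc = data.get('bozo_exception')
    if data.get('bozo') and exc is not None and \
        exc.__class__.__name__.lower() == 'timeout':
        return 408
    return 500

print capture_status({'entries': [{}]})                    # 200
print capture_status({'bozo': 1, 'bozo_exception': None})  # 500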
def open():
    try:
        cache = config.cache_directory()
        index = os.path.join(cache, 'index')
        if not os.path.exists(index): return None
        import anydbm
        return anydbm.open(filename(index, 'id'), 'w')
    except Exception, e:
        if e.__class__.__name__ == 'DBError': e = e.args[-1]
        from planet import logger as log
        log.error(str(e))
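# A hedged usage sketch for the id index opened above: anydbm maps string
# keys to string values on disk, so an entry id can be looked up without
# reading the whole cache. The file name and key below are illustrative,
# and the exact dbm backend anydbm picks varies by platform.
import anydbm

index = anydbm.open('index.id', 'c')   # 'c' creates the file if missing
index['tag:example.org,2004:1'] = 'http://example.org/feed.atom'
if index.has_key('tag:example.org,2004:1'):
    print index['tag:example.org,2004:1']
index.close()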
import planet
planet.getLogger('DEBUG', config.log_format())

if not offline:
    from planet import spider
    try:
        spider.spiderPlanet(only_if_new=only_if_new)
    except Exception, e:
        print e

from planet import splice
doc = splice.splice()

if debug_splice:
    from planet import logger
    logger.info('writing debug.atom')
    debug = open('debug.atom', 'w')
    try:
        from lxml import etree
        from StringIO import StringIO
        # round-trip through lxml so the debug output is pretty-printed
        tree = etree.parse(StringIO(doc.toxml()))
        debug.write(etree.tostring(tree, pretty_print=True))
    except:
        # fall back to minidom if lxml is unavailable or parsing fails
        debug.write(doc.toprettyxml(indent='  ', encoding='utf-8'))
    debug.close()

splice.apply(doc.toxml('utf-8'))

if config.pubsubhubbub_hub() and not no_publish:
    from planet import publish
    publish.publish(config)
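# A self-contained sketch of the debug-output fallback above: pretty-print
# a minidom document via lxml when it is installed, otherwise fall back to
# minidom's own toprettyxml. The output file name mirrors the code above.
from xml.dom import minidom

doc = minidom.parseString('<feed xmlns="http://www.w3.org/2005/Atom"/>')
out = open('debug.atom', 'w')
try:
    from lxml import etree
    from StringIO import StringIO
    tree = etree.parse(StringIO(doc.toxml()))
    out.write(etree.tostring(tree, pretty_print=True))
except ImportError:
    out.write(doc.toprettyxml(indent='  ', encoding='utf-8'))
out.close()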
def splice():
    """ Splice together a planet from a cache of entries """
    import planet
    log = planet.logger

    log.info("Loading cached data")
    cache = config.cache_directory()
    dir = [(os.stat(file).st_mtime, file) for file in glob.glob(cache + "/*")
           if not os.path.isdir(file)]
    dir.sort()
    dir.reverse()

    max_items = max([config.items_per_page(templ)
        for templ in config.template_files() or ['Planet']])

    doc = minidom.parseString('<feed xmlns="http://www.w3.org/2005/Atom"/>')
    feed = doc.documentElement

    # insert feed information
    createTextElement(feed, 'title', config.name())
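# A standalone sketch of the cache-ordering idiom above: pair each cache
# file with its modification time, sort, and reverse so the newest files
# come first. The directory name is illustrative.
import glob, os

cache = 'cache'
entries = [(os.stat(f).st_mtime, f) for f in glob.glob(cache + '/*')
           if not os.path.isdir(f)]
entries.sort()
entries.reverse()                 # newest first
for mtime, path in entries[:10]:
    print path                    # the ten most recently modified files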