def get_host():
    url = configuration.get_config_value('server', 'url')
    url = url or 'http://localhost:5000/wps'

    LOGGER.warning("starting WPS service on %s", url)

    parsed_url = urlparse(url)
    if ':' in parsed_url.netloc:
        bind_host, port = parsed_url.netloc.split(':')
        port = int(port)
    else:
        bind_host = parsed_url.netloc
        port = 80
    return bind_host, port
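
For reference, a minimal standalone sketch (not part of the original module) of the same host/port split, using only the standard library; the example URL is arbitrary:

from urllib.parse import urlparse

parsed = urlparse('http://localhost:5000/wps')
host, _, port = parsed.netloc.partition(':')
print(host, int(port) if port else 80)   # -> localhost 5000
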
processes = [p.json for p in self.processes.values()]
return {
    'pywps_version': __version__,
    'version': self.version,
    'title': config.get_config_value('metadata:main', 'identification_title'),
    'abstract': config.get_config_value('metadata:main', 'identification_abstract'),
    'keywords': config.get_config_value('metadata:main', 'identification_keywords').split(","),
    'keywords_type': config.get_config_value('metadata:main', 'identification_keywords_type').split(","),
    'fees': config.get_config_value('metadata:main', 'identification_fees'),
    'accessconstraints': config.get_config_value(
        'metadata:main',
        'identification_accessconstraints'
    ).split(','),
    'profile': config.get_config_value('metadata:main', 'identification_profile'),
    'provider': {
        'name': config.get_config_value('metadata:main', 'provider_name'),
        'site': config.get_config_value('metadata:main', 'provider_url'),
        'individual': config.get_config_value('metadata:main', 'contact_name'),
        'position': config.get_config_value('metadata:main', 'contact_position'),
        'voice': config.get_config_value('metadata:main', 'contact_phone'),
        'fascimile': config.get_config_value('metadata:main', 'contact_fax'),
        'address': {
            'delivery': config.get_config_value('metadata:main', 'deliveryPoint'),
            'city': config.get_config_value('metadata:main', 'contact_city'),
            'state': config.get_config_value('metadata:main', 'contact_stateorprovince'),
            'postalcode': config.get_config_value('metadata:main', 'contact_postalcode'),
            'country': config.get_config_value('metadata:main', 'contact_country'),
            'email': config.get_config_value('metadata:main', 'contact_email')
        },
        'url': config.get_config_value('metadata:main', 'contact_url'),
        'hours': config.get_config_value('metadata:main', 'contact_hours'),
        'instructions': config.get_config_value('metadata:main', 'contact_instructions'),
        'role': config.get_config_value('metadata:main', 'contact_role')
    },
    'serviceurl': config.get_config_value('server', 'url'),
    'languages': config.get_config_value('server', 'language').split(','),
    'processes': processes
}
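
The capabilities document above is filled entirely from the PyWPS configuration file. An illustrative [metadata:main] fragment covering some of the keys read above (the values are placeholders, not from the source):

[metadata:main]
identification_title=PyWPS Demo Server
identification_abstract=WPS processes for testing and debugging
identification_keywords=WPS,PyWPS,Processes
identification_fees=NONE
provider_name=Example Organization
contact_name=Lastname, Firstname
contact_email=contact@example.org
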
def run_job(self):
    LOGGER.info("Submitting job ...")
    try:
        import drmaa
        session = drmaa.Session()
        # init session
        session.initialize()
        # dump job to file
        dump_filename = self.job.dump()
        if not dump_filename:
            raise Exception("Could not dump job status.")
        # prepare remote command
        jt = session.createJobTemplate()
        jt.remoteCommand = os.path.join(
            config.get_config_value('processing', 'path'),
            'joblauncher')
        if os.getenv("PYWPS_CFG"):
            import shutil
            cfg_file = os.path.join(self.job.workdir, "pywps.cfg")
            shutil.copy2(os.getenv('PYWPS_CFG'), cfg_file)
            LOGGER.debug("Copied pywps config: {}".format(cfg_file))
            jt.args = ['-c', cfg_file, dump_filename]
        else:
            jt.args = [dump_filename]
        jt.joinFiles = True
        jt.outputPath = ":{}".format(os.path.join(self.job.workdir, "job-output.txt"))
        # run job
        jobid = session.runJob(jt)
        LOGGER.info('Your job has been submitted with ID {}'.format(jobid))
        # show status
        import time
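
The excerpt stops here. A plausible continuation (an assumption, not the original source) would poll the DRMAA job state until the job leaves the queue, assuming the drmaa-python Session.jobStatus() call and JobState constants:

        # Hypothetical continuation of the try block above.
        while session.jobStatus(jobid) not in (
                drmaa.JobState.DONE, drmaa.JobState.FAILED):
            time.sleep(1)
        LOGGER.info('Job {} finished with state {}'.format(
            jobid, session.jobStatus(jobid)))
        session.deleteJobTemplate(jt)
        session.exit()
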
def output_path():
    return configuration.get_config_value("server", "outputpath")
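
output_path, like get_host above, reads the [server] section of the configuration. An illustrative fragment with the server keys used across these snippets (paths and URLs are placeholders):

[server]
url=http://localhost:5000/wps
outputpath=outputs
outputurl=http://localhost:5000/outputs
language=en-US
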
def __init__(self, processes=[], cfgfiles=None):
    # ordered dict of processes
    self.processes = OrderedDict((p.identifier, p) for p in processes)

    if cfgfiles:
        config.load_configuration(cfgfiles)

    if config.get_config_value('logging', 'file') and config.get_config_value('logging', 'level'):
        LOGGER.setLevel(getattr(logging, config.get_config_value('logging', 'level')))
        if not LOGGER.handlers:  # hasHandlers in Python 3.x
            fh = logging.FileHandler(config.get_config_value('logging', 'file'))
            fh.setFormatter(logging.Formatter(config.get_config_value('logging', 'format')))
            LOGGER.addHandler(fh)
    else:  # NullHandler | StreamHandler
        if not LOGGER.handlers:
            LOGGER.addHandler(logging.NullHandler())
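
A minimal usage sketch for this constructor; the SayHello process and the pywps.cfg path below are made-up examples, not part of the source:

from pywps import Process, Service, LiteralInput, LiteralOutput


class SayHello(Process):
    def __init__(self):
        super(SayHello, self).__init__(
            self._handler,
            identifier='say_hello',
            title='Say Hello',
            inputs=[LiteralInput('name', 'Your name', data_type='string')],
            outputs=[LiteralOutput('response', 'Greeting', data_type='string')])

    def _handler(self, request, response):
        # Each input is a list of occurrences; take the first value.
        response.outputs['response'].data = 'Hello {}'.format(
            request.inputs['name'][0].data)
        return response


service = Service(processes=[SayHello()], cfgfiles=['pywps.cfg'])
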
def _run_process(self, wps_request, wps_response):
    LOGGER.debug("Started processing request: {}".format(self.uuid))
    try:
        self._set_grass(wps_request)
        # if required set HOME to the current working directory.
        if config.get_config_value('server', 'sethomedir') is True:
            os.environ['HOME'] = self.workdir
            LOGGER.info('Setting HOME to current working directory: {}'.format(os.environ['HOME']))
            LOGGER.debug('ProcessID={}, HOME={}'.format(self.uuid, os.environ.get('HOME')))
        wps_response._update_status(WPS_STATUS.STARTED, u'PyWPS Process started', 0)
        self.handler(wps_request, wps_response)  # the user must update the wps_response.
        # Ensure process termination
        if wps_response.status != WPS_STATUS.SUCCEEDED and wps_response.status != WPS_STATUS.FAILED:
            # if (not wps_response.status_percentage) or (wps_response.status_percentage != 100):
            LOGGER.debug('Updating process status to 100% if everything went correctly')
            wps_response._update_status(WPS_STATUS.SUCCEEDED, 'PyWPS Process {} finished'.format(self.title), 100)
    except Exception as e:
        traceback.print_exc()
        LOGGER.debug('Retrieving file and line number where exception occurred')
        exc_type, exc_obj, exc_tb = sys.exc_info()
        found = False
        while not found:

LOGGER.debug('Initializing database connection')
global _SESSION_MAKER
global _LAST_SESSION

if _LAST_SESSION:
    _LAST_SESSION.close()

if _SESSION_MAKER:
    _SESSION_MAKER.close_all()
    _LAST_SESSION = _SESSION_MAKER()
    return _LAST_SESSION

database = configuration.get_config_value('logging', 'database')
echo = True
level = configuration.get_config_value('logging', 'level')
level_name = logging.getLevelName(level)
if isinstance(level_name, int) and level_name >= logging.INFO:
    echo = False
try:
    if database.startswith("sqlite") or database.startswith("memory"):
        engine = sqlalchemy.create_engine(database,
                                          connect_args={'check_same_thread': False},
                                          poolclass=StaticPool,
                                          echo=echo)
    else:
        engine = sqlalchemy.create_engine(database, echo=echo, poolclass=NullPool)
except sqlalchemy.exc.SQLAlchemyError as e:
    raise NoApplicableCode("Could not connect to database: {}".format(e))

Session = sessionmaker(bind=engine)
ProcessInstance.metadata.create_all(engine)
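
The connection settings come from the [logging] section of the PyWPS configuration; an illustrative fragment with the keys read by this and the __init__ snippet above (values are placeholders):

[logging]
level=INFO
database=sqlite:///pywps-logs.sqlite3
file=pywps.log
format=%(asctime)s [%(levelname)s] %(message)s
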
def wfs_common(request, response, mode, spatial_mode='wfs', dir_output=','):
    """Common part of wps process for wfs operations.

    :param request: request for wps process handler
    :param response: response for wps process handler
    :param mode: 'subsetter' or 'averager'
    :return response: wps process response
    """
    outputpath = configuration.get_config_value('server', 'outputpath')
    outputurl = configuration.get_config_value('server', 'outputurl')

    list_of_files = []
    for one_resource in request.inputs['resource']:
        # Download if not opendap
        # Adding a maximum file size from a server config file would
        # be possible here...
        try:
            nc_file = opendap_or_download(
                one_resource.data,
                auth_tkt_cookie=request.http_request.cookies,
                output_path='/tmp')
        except Exception:
            raise Exception(traceback.format_exc())
        list_of_files.append(nc_file)

    if ('typename' in request.inputs) and ('featureids' in request.inputs):