Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
async def process_server_request(self, request, send_to_server):
    """Queue a server request for a driver and wait until its reply is sent.

    Assigns a unique ``request_id`` to the request, registers the reply
    callback under that id in ``self._server_responses``, enqueues the work,
    and blocks until the responder signals ``reply_sent``.

    Args:
        request (dict-like): incoming server request; mutated to carry a
            fresh ``request_id``
        send_to_server (callable): used by the responder to deliver the reply
    """
    pkdlog(f'Processing server request. Request: {request}')
    reply_sent = tornado.locks.Event()
    request.request_id = str(uuid.uuid4())
    work_to_do = pkcollections.Dict(request)
    self._server_responses[request.request_id] = pkcollections.Dict({
        'send': send_to_server,
        'reply_sent': reply_sent,
    })
    try:
        await self._driver_work_q.put(work_to_do)
        await reply_sent.wait()
    finally:
        # Bug fix: the original `del` ran only on the success path, so a
        # cancelled/failed await leaked the entry in _server_responses.
        # pop(..., None) also tolerates a responder that already cleaned up.
        self._server_responses.pop(request.request_id, None)
def _get_propagation(op):
    """Collect per-element propagation parameters from an SRW optics container.

    Builds a dict mapping a 1-based element counter (as a string) to a list of
    propagation parameter rows. A drift (``SRWLOptD``) immediately following a
    non-drift element is folded into that element's entry rather than getting
    its own.

    Args:
        op: SRW container with parallel arrays ``arOpt`` (optical elements)
            and ``arProp`` (propagation parameter rows) -- assumed aligned;
            TODO confirm against caller

    NOTE(review): this definition is truncated at the chunk boundary (the
    trailing ``else:`` has no visible body).
    """
    prop_dict = pkcollections.Dict()
    counter = 0
    # Walk all but the last propagation row (the last drift is excluded below)
    for i in range(len(op.arProp) - 1):
        name = op.arOpt[i].__class__.__name__
        try:
            next_name = op.arOpt[i + 1].__class__.__name__
        except Exception:
            # arOpt may be shorter than arProp; there is no element after this one
            next_name = None
        if (name != 'SRWLOptD') or \
            (name == 'SRWLOptD' and next_name == 'SRWLOptD') or \
            ((i + 1) == len(op.arProp) - 1): # exclude last drift
            counter += 1
            prop_dict[str(counter)] = [op.arProp[i]]
            # Fold a drift that directly follows this element into its entry
            if next_name == 'SRWLOptD':
                prop_dict[str(counter)].append(op.arProp[i + 1])
        else:
def _init_uris(app, simulation_db):
    """Populate the module-level routing tables from the schema's route map.

    For each route declared in ``simulation_db.SCHEMA_COMMON.route``, resolves
    the registered API function, validates it, and indexes the route by both
    base URI and API name. Routes whose API module was never registered are
    skipped with a debug message.

    Args:
        app: web application instance (not referenced in the visible span)
        simulation_db: module providing ``SCHEMA_COMMON.route``

    NOTE(review): truncated at the chunk boundary (the trailing
    ``if r.base_uri == '':`` has no visible body).
    """
    global _default_route, _empty_route, srunit_uri, _api_to_route, _uri_to_route
    # Guard against double initialization
    assert not _default_route, \
        '_init_uris called twice'
    _uri_to_route = pkcollections.Dict()
    _api_to_route = pkcollections.Dict()
    for k, v in simulation_db.SCHEMA_COMMON.route.items():
        r = _split_uri(v)
        try:
            r.func = _api_funcs[_FUNC_PREFIX + k]
        except KeyError:
            # Declared in the schema but its module never registered an API func
            pkdc('not adding api, because module not registered: uri={}', v)
            continue
        sirepo.api_auth.assert_api_def(r.func)
        r.decl_uri = v
        r.name = k
        # Two schema routes must not share a base URI
        assert not r.base_uri in _uri_to_route, \
            '{}: duplicate end point; other={}'.format(v, _uri_to_route[r.base_uri])
        _uri_to_route[r.base_uri] = r
        _api_to_route[k] = r
        if r.base_uri == '':
def _beamline_element(obj, idx, title, elem_type, position):
    """Convert an SRW optical element into a beamline-model dict.

    Args:
        obj: SRW optical element instance whose attributes are copied
        idx: model id assigned to the element
        title: display title
        elem_type (str): model type, e.g. 'aperture', 'obstacle', 'crl'
        position: longitudinal position of the element

    NOTE(review): truncated at the chunk boundary (the 'crl' branch's
    ``keys`` list is cut mid-literal).
    """
    data = pkcollections.Dict()
    data['id'] = idx
    data['type'] = elem_type
    data['title'] = title
    data['position'] = position
    if elem_type in ['aperture', 'obstacle']:
        data['shape'] = obj.shape
        data['horizontalOffset'] = obj.x
        data['verticalOffset'] = obj.y
        # 1e3 scale suggests meters -> millimeters; TODO confirm units
        data['horizontalSize'] = obj.Dx * 1e3
        data['verticalSize'] = obj.Dy * 1e3
    elif elem_type == 'crl':
        keys = ['attenuationLength', 'focalPlane', 'horizontalApertureSize', 'numberOfLenses', 'radius',
def __init__(self, monkeypatch, user_name='joeblow'):
    """Mock GitHub OAuth client for tests.

    Holds a canned access token plus user data for ``user_name`` and
    installs itself as ``sirepo.auth.github._oauth_client``.
    """
    from pykern import pkcollections
    from sirepo.auth import github

    # don't really care about id as long as it is bound to login
    user_data = pkcollections.Dict(id=user_name, login=user_name)
    self.values = pkcollections.Dict(access_token='xyzzy', data=user_data)
    monkeypatch.setattr(github, '_oauth_client', self)
def _create_html(zip_path, data):
    """Convert zip to html data

    Renders the 'archive.html' template with the simulation metadata and the
    base64-encoded zip embedded in it.

    Args:
        zip_path (py.path): what to embed
        data (dict): simulation db
    Returns:
        py.path, str: file and mime type
    """
    import base64

    # Use same tmp directory
    fp = zip_path.new(ext='.html')
    values = pkcollections.Dict(data=data)
    values.uri = uri_router.uri_for_api('importArchive', external=False)
    values.server = uri_router.uri_for_api('importArchive')[:-len(values.uri)]
    sc = simulation_db.SCHEMA_COMMON
    values.appLongName = sc.appInfo[data.simulationType].longName
    values.appShortName = sc.appInfo[data.simulationType].shortName
    values.productLongName = sc.productInfo.longName
    values.productShortName = sc.productInfo.shortName
    # Bug fix: str.encode('base64') is the Python 2 codec API and fails on
    # py3. encodebytes reproduces that codec's 76-char line wrapping; the zip
    # must be read as bytes, not text.
    values.zip = base64.encodebytes(zip_path.read_binary()).decode('ascii')
    with open(str(fp), 'wb') as f:
        # Bug fix: original called fp.write(...) here, leaving the opened
        # file object f unused and writing through the py.path instead.
        f.write(pkjinja.render_resource('archive.html', values))
    return fp, 'text/html'
def _report_job_status(job_tracker, request):
    """Look up a report job's status and build the reply message.

    Args:
        job_tracker: tracker exposing ``report_job_status(run_dir, jhash)``
        request: message with ``run_dir``, ``jhash``, ``request_id``, ``uid``

    Returns:
        pkcollections.Dict: reply echoing request_id/uid plus the status value
    """
    pkdc('report_job_status: {}', request)
    #TODO(e-carlin): Find a common place to do pkio.py_path() these are littered around
    run_dir = pkio.py_path(request.run_dir)
    status = job_tracker.report_job_status(run_dir, request.jhash).value
    reply = {
        'action': 'status_of_report_job',
        'request_id': request.request_id,
        'uid': request.uid,
        'status': status,
    }
    return pkcollections.Dict(reply)
def init_mock():
    """A mock cookie for pkcli

    Replaces ``flask.g`` with a plain dict so cookie state can be held
    without a real request context, then creates an empty state and sets
    the sentinel.
    """
    # NOTE(review): order appears significant -- presumably _State and
    # set_sentinel store into flask.g; confirm before reordering.
    flask.g = pkcollections.Dict()
    _State('')
    set_sentinel()
# NOTE(review): fragment of a larger report-labeling function; ``m``, ``r``,
# ``sim_in``, ``filename`` and helpers (_SIM_DATA, _intensity_units,
# _MIRROR_OUTPUT_FILE) are defined outside this view, and the final ``elif``
# is truncated at the chunk boundary.
flux_type = int(m[r]['fluxType'])
sValShort = 'Flux'; sValType = 'Flux through Finite Aperture'; sValUnit = 'ph/s/.1%bw'
if flux_type == 2:
    # flux_type 2 switches the label to per-area intensity units
    sValShort = 'Intensity'
    sValUnit = 'ph/s/.1%bw/mm^2'
is_gaussian = False
if 'models' in sim_in and _SIM_DATA.srw_is_gaussian_source(m['simulation']):
    is_gaussian = True
#TODO(pjm): move filename and metadata to a constant, using _DATA_FILE_FOR_MODEL
# Pick the energy placeholder used in the plot title for this report type
if r == 'initialIntensityReport':
    before_propagation_name = 'Before Propagation (E={photonEnergy} eV)'
elif r == 'sourceIntensityReport':
    before_propagation_name = 'E={sourcePhotonEnergy} eV'
else:
    before_propagation_name = 'E={photonEnergy} eV'
# Map each known output file to ([column titles], [units]) for plot labels
file_info = pkcollections.Dict({
    'res_spec_se.dat': [['Photon Energy', 'Intensity', 'On-Axis Spectrum from Filament Electron Beam'], ['eV', _intensity_units(is_gaussian, sim_in)]],
    'res_spec_me.dat': [['Photon Energy', sValShort, sValType], ['eV', sValUnit]],
    'res_pow.dat': [['Horizontal Position', 'Vertical Position', 'Power Density', 'Power Density'], ['m', 'm', 'W/mm^2']],
    'res_int_se.dat': [['Horizontal Position', 'Vertical Position', before_propagation_name, 'Intensity'], ['m', 'm', _intensity_units(is_gaussian, sim_in)]],
    #TODO(pjm): improve multi-electron label
    'res_int_pr_me.dat': [['Horizontal Position', 'Vertical Position', before_propagation_name, 'Intensity'], ['m', 'm', _intensity_units(is_gaussian, sim_in)]],
    'res_int_pr_me_dcx.dat': [['Horizontal Position (conj.)', 'Horizontal Position', '', 'Degree of Coherence'], ['m', 'm', '']],
    'res_int_pr_me_dcy.dat': [['Vertical Position (conj.)', 'Vertical Position', '', 'Degree of Coherence'], ['m', 'm', '']],
    'res_int_pr_se.dat': [['Horizontal Position', 'Vertical Position', 'After Propagation (E={photonEnergy} eV)', 'Intensity'], ['m', 'm', _intensity_units(is_gaussian, sim_in)]],
    _MIRROR_OUTPUT_FILE: [['Horizontal Position', 'Vertical Position', 'Optical Path Difference', 'Optical Path Difference'], ['m', 'm', 'm']],
})
filename = os.path.basename(filename)
title = file_info[filename][0][2]
# Substitute the actual photon energy into the title placeholder, if present
if '{photonEnergy}' in title:
    title = title.format(photonEnergy=m['simulation']['photonEnergy'])
elif '{sourcePhotonEnergy}' in title: