# Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
async def update_run(
        request: Request,
        project: str,
        uid: str,
        iter: int = 0,
        db_session: Session = Depends(deps.get_db_session)):
    """Update an existing run record from the JSON request body.

    :param request:    incoming HTTP request; body must be valid JSON
    :param project:    project the run belongs to
    :param uid:        unique id of the run to update
    :param iter:       hyper-parameter iteration number (0 = main run)
    :param db_session: injected DB session
    :returns: empty dict on success
    :raises HTTPException: 400 (via log_and_raise) when the body is not JSON
    """
    data = None
    try:
        data = await request.json()
    except ValueError:
        log_and_raise(HTTPStatus.BAD_REQUEST, reason="bad JSON body")
    logger.debug(data)
    # DB access is blocking — keep it off the event loop.
    await run_in_threadpool(get_db().update_run, db_session, data, uid, project, iter=iter)
    # Lazy %-args: the message is only rendered if the record is emitted.
    logger.info("update run: %s", data)
    return {}
async def _fetch_places_list(params: PlacesQueryParam):
    """Fetch the list of places matching *params*, dispatching on source.

    Three paths:
    - PagesJaunes source: query pj_source with the flattened PJ categories.
    - Default (OSM) source with a free-text query: ask bragi within the bbox.
    - Default (OSM) source with category / class-subclass filters: query
      elasticsearch directly.

    NOTE(review): the original text was truncated mid-statement here (a
    splice corrupted the file); the tail below is reconstructed from the
    duplicated fragment further down in this file — confirm against VCS.
    """
    if params.source == PoiSource.PAGESJAUNES:
        all_categories = [pj_category for c in params.category for pj_category in c["pj_filters"]]
        return await run_in_threadpool(
            pj_source.get_places_bbox, all_categories, params.bbox, size=params.size, query=params.q
        )
    if params.q:
        # Default source (OSM) with query
        bragi_response = await bragi_client.pois_query_in_bbox(
            query=params.q, bbox=params.bbox, lang=params.lang, limit=params.size
        )
        return [BragiPOI(f) for f in bragi_response.get("features", [])]
    # Default source (OSM) with category or class/subclass filters
    if params.raw_filter:
        raw_filters = params.raw_filter
    else:
        raw_filters = [f for c in params.category for f in c["raw_filters"]]
    # fetch_es_pois is blocking — run it off the event loop.
    bbox_places = await run_in_threadpool(
        fetch_es_pois, raw_filters=raw_filters, bbox=params.bbox, max_size=params.size,
    )
    return [POI(p["_source"]) for p in bbox_places]
async def _periodic_function_wrapper(interval: int, function, *args, **kwargs):
    """Run *function* forever, once every *interval* seconds.

    Coroutine functions are awaited directly; plain functions are executed in
    the threadpool so they do not block the event loop. Any exception is
    logged and swallowed so a single failed execution never kills the
    periodic task; the next attempt happens after the usual sleep.

    :param interval: seconds to sleep between executions
    :param function: coroutine function or plain callable to invoke
    """
    while True:
        try:
            if asyncio.iscoroutinefunction(function):
                await function(*args, **kwargs)
            else:
                await run_in_threadpool(function, *args, **kwargs)
        except Exception:
            # Lazy %-args instead of an eager f-string (logging best practice);
            # the rendered message text is unchanged.
            logger.warning(
                'Failed during periodic function execution: %s, exc: %s',
                function.__name__, traceback.format_exc(),
            )
        await asyncio.sleep(interval)
# NOTE(review): orphaned fragment — the lines below appear to be a duplicated
# tail of _fetch_places_list (they repeat its bragi/OSM branches and supply the
# fetch_es_pois completion its truncated body is missing). They are not valid
# standalone code (the first two lines are the middle of a call expression).
# Left byte-identical here; reconcile against version control and delete.
pj_source.get_places_bbox, all_categories, params.bbox, size=params.size, query=params.q
)
if params.q:
# Default source (OSM) with query
bragi_response = await bragi_client.pois_query_in_bbox(
query=params.q, bbox=params.bbox, lang=params.lang, limit=params.size
)
return [BragiPOI(f) for f in bragi_response.get("features", [])]
# Default source (OSM) with category or class/subclass filters
if params.raw_filter:
raw_filters = params.raw_filter
else:
raw_filters = [f for c in params.category for f in c["raw_filters"]]
bbox_places = await run_in_threadpool(
fetch_es_pois, raw_filters=raw_filters, bbox=params.bbox, max_size=params.size,
)
return [POI(p["_source"]) for p in bbox_places]
# NOTE(review): the `async def` header line was missing from this fragment;
# the name `store_function` is reconstructed from the `get_db().store_function`
# call and the "store function" log message — confirm against VCS.
async def store_function(
        request: Request,
        project: str,
        name: str,
        tag: str = "",
        versioned: bool = False,
        db_session: Session = Depends(deps.get_db_session)):
    """Store (create or overwrite) a function object from the JSON body.

    :param request:    incoming HTTP request; body must be valid JSON
    :param project:    project the function belongs to
    :param name:       function name
    :param tag:        optional tag to store the function under
    :param versioned:  whether to keep a versioned copy
    :param db_session: injected DB session
    :returns: empty dict on success
    :raises HTTPException: 400 (via log_and_raise) when the body is not JSON
    """
    data = None
    try:
        data = await request.json()
    except ValueError:
        log_and_raise(HTTPStatus.BAD_REQUEST, reason="bad JSON body")
    logger.debug(data)
    logger.info(
        "store function: project=%s, name=%s, tag=%s", project, name, tag)
    # DB access is blocking — keep it off the event loop.
    await run_in_threadpool(get_db().store_function, db_session, data, name, project, tag=tag, versioned=versioned)
    return {}
async def build_function(
        request: Request,
        db_session: Session = Depends(deps.get_db_session)):
    """Build the function described in the JSON request body.

    :param request:    incoming HTTP request; body must be valid JSON with a
                       "function" entry and an optional "with_mlrun" flag
    :param db_session: injected DB session
    :returns: dict with the built function ("data") and its readiness ("ready")
    :raises HTTPException: 400 (via log_and_raise) when the body is not JSON
    """
    data = None
    try:
        data = await request.json()
    except ValueError:
        log_and_raise(HTTPStatus.BAD_REQUEST, reason="bad JSON body")
    # Lazy %-args: the message is only rendered if the record is emitted.
    logger.info("build_function:\n%s", data)
    function = data.get("function")
    # NOTE(review): strtobool comes from distutils, which is removed in
    # Python 3.12 — consider a local replacement before upgrading.
    with_mlrun = strtobool(data.get("with_mlrun", "on"))
    # The build itself is blocking — keep it off the event loop.
    fn, ready = await run_in_threadpool(_build_function, db_session, function, with_mlrun)
    return {
        "data": fn.to_dict(),
        "ready": ready,
    }
async def start_function(
        request: Request,
        db_session: Session = Depends(deps.get_db_session)):
    """Start the function described in the JSON request body.

    Returns a dict whose "data" entry is the started function's dict form.
    Responds 400 (via log_and_raise) when the body is not valid JSON.
    """
    payload = None
    try:
        payload = await request.json()
    except ValueError:
        log_and_raise(HTTPStatus.BAD_REQUEST, reason="bad JSON body")
    # _start_function is blocking — run it in the threadpool.
    started = await run_in_threadpool(_start_function, db_session, payload)
    return {"data": started.to_dict()}
async def store_run(
        request: Request,
        project: str,
        uid: str,
        iter: int = 0,
        db_session: Session = Depends(deps.get_db_session)):
    """Store (create or overwrite) a run record from the JSON request body.

    :param request:    incoming HTTP request; body must be valid JSON
    :param project:    project the run belongs to
    :param uid:        unique id of the run
    :param iter:       hyper-parameter iteration number (0 = main run)
    :param db_session: injected DB session
    :returns: empty dict on success
    :raises HTTPException: 400 (via log_and_raise) when the body is not JSON
    """
    data = None
    try:
        data = await request.json()
    except ValueError:
        log_and_raise(HTTPStatus.BAD_REQUEST, reason="bad JSON body")
    logger.debug(data)
    # DB access is blocking — keep it off the event loop.
    await run_in_threadpool(get_db().store_run, db_session, data, uid, project, iter=iter)
    # Lazy %-args: the message is only rendered if the record is emitted.
    logger.info("store run: %s", data)
    return {}
async def tag_objects(
        request: Request,
        project: str,
        name: str,
        db_session: Session = Depends(deps.get_db_session),
):
    """Tag the objects described in the JSON body with *name* in *project*.

    Returns the project, the tag name, and how many objects were tagged.
    Responds 400 (via log_and_raise) when the body is not valid JSON.
    """
    body = None
    try:
        body = await request.json()
    except ValueError:
        log_and_raise(HTTPStatus.BAD_REQUEST, reason="bad JSON body")
    # _tag_objects is blocking — run it in the threadpool.
    tagged = await run_in_threadpool(_tag_objects, db_session, body, project, name)
    return {
        "project": project,
        "name": name,
        "count": len(tagged),
    }
async def submit_pipeline(
        request: Request,
        namespace: str = config.namespace,
        experiment_name: str = Query("Default", alias="experiment"),
        run_name: str = Query("", alias="run"),
):
    """Submit a pipeline run from the raw request body.

    When no run name is given, one is derived from the experiment name plus
    a timestamp. Responds 400 (via log_and_raise) on an empty body. Returns
    the submitted run's id and name.
    """
    if not run_name:
        timestamp = datetime.now().strftime("%Y-%m-%d %H-%M-%S")
        run_name = experiment_name + " " + timestamp
    raw_body = await request.body()
    if not raw_body:
        log_and_raise(HTTPStatus.BAD_REQUEST, reason="post data is empty")
    # Submission is blocking — run it in the threadpool.
    run = await run_in_threadpool(
        _submit_pipeline, request, raw_body, namespace, experiment_name, run_name
    )
    return {"id": run.id, "name": run.name}