How to use mlpm - 10 common examples

To help you get started, we’ve selected a few mlpm examples, based on popular ways it is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github autoai-org / AID / components / mlserve / mlpm / server.py View on Github external
@aidserver.route("/", methods=["GET"])
async def ping():
    """Health-check endpoint: report that the server is alive."""
    payload = {"status": "OK"}
    return await json_resp(payload, status=200)
github autoai-org / AID / components / mlserve / aid / server.py View on Github external
@aidserver.route("/train", methods=["GET", "POST"])
async def train(request):
    """Forward POST requests to the solver training handler.

    Non-POST requests fall through and yield None, as in the original.
    """
    if request.method != "POST":
        return None
    return handle_post_solver_train_or_infer(
        request, UPLOAD_TRAIN_FOLDER, "train")
github autoai-org / AID / components / mlserve / mlpm / server.py View on Github external
@aidserver.route("/batch", methods=["POST"])
async def batch_infer():
    """Handle batch-inference uploads posted to /batch.

    Uses the framework-global ``request``; non-POST requests yield None.
    """
    if request.method != 'POST':
        return None
    return await handle_batch_infer_request(
        request, UPLOAD_INFER_FOLDER, PUBLIC_FOLDER)
github autoai-org / AID / components / mlserve / aid / server.py View on Github external
@aidserver.route("/infer", methods=["GET", "POST"])
async def infer(request):
    """Forward POST requests to the solver inference handler.

    Non-POST requests fall through and yield None, as in the original.
    """
    if request.method != 'POST':
        return None
    return handle_post_solver_train_or_infer(
        request, UPLOAD_INFER_FOLDER, "infer")
github autoai-org / AID / components / mlserve / mlpm / server.py View on Github external
@aidserver.route("/infer", methods=["GET", "POST"])
async def infer():
    """Run single-sample inference for POST requests to /infer.

    Uses the framework-global ``request``; non-POST requests yield None.
    """
    if request.method != 'POST':
        return None
    return await handle_post_solver_train_or_infer(
        request, UPLOAD_INFER_FOLDER, "infer", PUBLIC_FOLDER)
github autoai-org / AID / components / mlserve / aid / server.py View on Github external
@aidserver.route("/", methods=["GET"])
async def ping(request):
    """Liveness probe: always answer 200 with a plain-text greeting."""
    greeting = 'Hello world!'
    return response.text(greeting, status=200)
github autoai-org / AID / components / mlserve / mlpm / server.py View on Github external
@aidserver.route("/train", methods=["GET", "POST"])
async def train():
    """Kick off solver training for POST requests to /train.

    Uses the framework-global ``request``; non-POST requests yield None.
    """
    if request.method != "POST":
        return None
    return await handle_post_solver_train_or_infer(
        request, UPLOAD_TRAIN_FOLDER, "train", PUBLIC_FOLDER)
github autoai-org / AID / components / mlserve / mlpm / server.py View on Github external
@aidserver.route("/static/<path:filename>")
async def send_static(filename):
    """Serve a file from the public assets folder.

    Fix: the original route ``"/static/"`` had no URL placeholder, so the
    router could never bind the ``filename`` parameter; ``<path:filename>``
    captures the rest of the path (including nested directories).
    """
    return await send_from_directory(os.path.abspath(PUBLIC_FOLDER), filename)
github autoai-org / AID / components / mlserve / aid / server.py View on Github external
def run_server(solver, port=None):
    """Attach *solver* to the app and start serving on all interfaces.

    When *port* is None, an available port is picked automatically.
    """
    chosen_port = port if port is not None else get_available_port()
    aidserver.solver = solver
    aidserver.run(host='0.0.0.0', port=chosen_port, workers=4)
github autoai-org / AID / components / mlserve / mlpm / handler.py View on Github external
request_type, target_folder):
    config = ImmutableMultiDict(await request.form)
    data = config.to_dict()
    results = {}
    if 'file' in await request.files:
        uploaded_file = await request.files['file']
        filename = secure_filename(uploaded_file.filename)
        # make sure the UPLOAD_FOLDER exsits
        if not os.path.isdir(upload_folder):
            os.makedirs(upload_folder)
        file_abs_path = os.path.join(upload_folder, filename)
        uploaded_file.save(file_abs_path)
        data['input_file_path'] = file_abs_path
    try:
        if request_type == "infer":
            results = aidserver.solver.infer(data)
        else:
            raise NotImplementedError
        if 'delete_after_process' in data:
            if str2bool(data['delete_after_process']):
                os.remove(file_abs_path)
        print(results)
        return json_resp(results, status=200)
    except Exception as e:
        traceback.print_exc()
        return json_resp({"error": str(e), "code": "500"}, status=500)