How to use the sanic.log.logger.debug function in sanic

To help you get started, we’ve selected a few examples showing how sanic.log.logger.debug is used in popular open source projects.

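Every example below follows the same pattern: import the shared logger instance from sanic.log and call its debug method. As a quick reference, here is a minimal, self-contained sketch; the app name, route, and port are placeholders rather than anything taken from the projects below, and debug output only shows up when the log level allows it (for example when running with debug=True):

from sanic import Sanic
from sanic.log import logger
from sanic.response import json

app = Sanic("logger_demo")  # hypothetical app name

@app.route("/ping")
async def ping(request):
    # debug messages go through sanic's preconfigured logger
    logger.debug("handling ping from %s", request.ip)
    return json({"ok": True})

if __name__ == "__main__":
    # debug=True lowers the sanic logger to DEBUG so the message above is visible
    app.run(host="127.0.0.1", port=8000, debug=True)

Because logger is a standard logging.Logger, the usual levels, formatting, and handlers apply.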

Example from gangtao/dataplay3 (server/dataplay/mlsvc/automl.py):

    def train(self):
        logger.debug('start to train')
        self._update_status(MLJobStatus.TRAINING)
        self.start_time = datetime.datetime.now().timestamp()
        try:
            self._save_meta()
            self._prepare()
            logger.debug('prepare complete')
            self.model.fit(self.X_train, self.y_train)
            self._save_model()
            logger.debug('train complete')
            self._update_status(MLJobStatus.VALIDATING)
            self._validate()
            logger.debug('validation complete')
            self._update_status(MLJobStatus.SUCCESS)
            self.end_time = datetime.datetime.now().timestamp()
            self._parse_model_representation()
            self.model_stats = self.model.sprint_statistics()
            self._save_meta()
        except Exception as e:
            logger.exception(f'failed to train the auto ml job: {e}')
            self._update_status(MLJobStatus.FAILED)
            self.training_error = str(e)
            self._save_meta()

Example from gangtao/dataplay3 (server/dataplay/mlsvc/time_serials.py):

    def train(self):
        logger.debug('start to train')
        self._update_status(MLJobStatus.TRAINING)
        self.start_time = datetime.datetime.now().timestamp()
        try:
            self._save_meta()
            self._prepare()
            logger.debug('prepare complete')
            self.model.fit(self.train_dataset)
            logger.debug('train complete')
            self._save_model()
            self._update_status(MLJobStatus.VALIDATING)
            self._validate()
            logger.debug('validation complete')
            self._update_status(MLJobStatus.SUCCESS)
            self.end_time = datetime.datetime.now().timestamp()
            self._save_meta()
        except Exception as e:
            self._update_status(MLJobStatus.FAILED)
            self.training_error = str(e)
            self._save_meta()

Example from huge-success/sanic (sanic/app.py); this excerpt picks up partway through the method that assembles server_settings:

):
            listeners = self.listeners[event_name].copy()
            if reverse:
                listeners.reverse()
            # Prepend sanic to the arguments when listeners are triggered
            listeners = [partial(listener, self) for listener in listeners]
            server_settings[settings_name] = listeners

        if self.configure_logging and debug:
            logger.setLevel(logging.DEBUG)

        if (
            self.config.LOGO
            and os.environ.get("SANIC_SERVER_RUNNING") != "true"
        ):
            logger.debug(
                self.config.LOGO
                if isinstance(self.config.LOGO, str)
                else BASE_LOGO
            )

        if run_async:
            server_settings["run_async"] = True

        # Serve
        if host and port and os.environ.get("SANIC_SERVER_RUNNING") != "true":
            proto = "http"
            if ssl is not None:
                proto = "https"
            logger.info("Goin' Fast @ {}://{}:{}".format(proto, host, port))

        return server_settings
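
The block above shows the two places sanic itself relies on DEBUG: the logger is only lowered to logging.DEBUG when both configure_logging and debug are set, and the startup logo is emitted through logger.debug. If an app is not run with debug=True, a hedged sketch for opting into debug output manually (plain stdlib logging, not a sanic-specific API):

import logging
from sanic.log import logger

# raise verbosity so logger.debug(...) calls are no longer filtered out
logger.setLevel(logging.DEBUG)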

Example from gangtao/dataplay3 (server/dataplay/datasvc/registry.py):

    def register(self, name, class_name, module):
        logger.debug(
            "DatasetTypeRegistry registered name=%s, class=%s, module=%s"
            % (name, class_name, module)
        )

        if not isinstance(module, str):
            logger.exception("Wrong module provided, %s is not a module name string." % (module))
            raise RuntimeError('Error while registering dataset type "%s %s"' % (name, module))

        try:
            importlib.import_module(module)
        except Exception as e:
            logger.exception("Wrong module provided, failed to load %s as a module." % (module))
            raise RuntimeError(
                'Error while registering dataset type "%s %s": %s' % (name, module, str(e))
            )

Example from gangtao/dataplay3 (server/dataplay/mlsvc/service.py):

async def delete_job(request, id):
    logger.debug(f'delete ml jobs {id}')
    try:
        MLJobManager.delete_job(id)
        return response.json({}, status=204)
    except Exception:
        logger.exception('failed to delete ml job')
        return response.json({}, status=500)

Example from gangtao/dataplay3 (server/dataplay/mlsvc/job.py):

    def delete_job_by_id(job_id):
        job_dir = os.path.join(MLJob.base_dir, job_id)
        try:
            shutil.rmtree(job_dir)
        except Exception:
            logger.exception(f'failed to delete job dir {job_dir}')
        else:
            logger.debug(f'successfully deleted the directory {job_dir}')

Example from gangtao/dataplay3 (server/dataplay/datasvc/csv.py):

    def _load(self):
        logger.debug(f'load csv from {self.path}')
        self.df = pd.read_csv(self.path)
        logger.debug(f'load csv from {self.path} success')

Example from gangtao/dataplay3 (server/dataplay/mlsvc/service.py):

async def list_jobs(request):
    logger.debug(f'list ml jobs with condition={request.args}')
    args = request.args
    try:
        jobs = MLJobManager.list_jobs()
        if args and 'type' in args:
            query_jobs = [job for job in jobs if job['type'] in args['type']]
            return response.json(query_jobs, status=200)
        else:
            return response.json(jobs, status=200)
    except Exception:
        logger.exception('failed to list ml jobs')
        return response.json({}, status=500)

Example from gangtao/dataplay3 (server/dataplay/mlsvc/job.py):

    def _save_model(self):
        logger.debug(
            f'save model for class={type(self).__name__} id={self.id} name={self.name}'
        )
        model_file = os.path.join(self.job_dir, 'model.joblib')
        dump(self, model_file)
        logger.debug('save model complete')