How to use the pydantic.PositiveInt type in pydantic

To help you get started, we’ve selected a few examples of pydantic’s PositiveInt, based on popular ways it is used in public projects.
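
For orientation, here is a minimal sketch of the most common pattern: annotating a model field with PositiveInt so that pydantic rejects zero and negative values. It assumes pydantic v1, which the examples below target; the TrainingConfig model and its field name are illustrative only.

from pydantic import BaseModel, PositiveInt, ValidationError

class TrainingConfig(BaseModel):
    num_epochs: PositiveInt  # must be an integer greater than zero

TrainingConfig(num_epochs=5)      # passes validation
try:
    TrainingConfig(num_epochs=0)  # rejected: 0 is not > 0
except ValidationError as err:
    print(err)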

github awslabs / gluon-ts / test / distribution / test_distribution_inference.py
    distr = Gaussian(mus, sigmas)
    samples = distr.sample()

    init_biases = [
        mu - START_TOL_MULTIPLE * TOL * mu,
        inv_softplus(sigma - START_TOL_MULTIPLE * TOL * sigma),
    ]

    mu_hat, sigma_hat = maximum_likelihood_estimate_sgd(
        GaussianOutput(),
        samples,
        init_biases=init_biases,
        hybridize=hybridize,
        learning_rate=PositiveFloat(0.001),
        num_epochs=PositiveInt(5),
    )

    assert (
        np.abs(mu_hat - mu) < TOL * mu
    ), f"mu did not match: mu = {mu}, mu_hat = {mu_hat}"
    assert (
        np.abs(sigma_hat - sigma) < TOL * sigma
    ), f"alpha did not match: sigma = {sigma}, sigma_hat = {sigma_hat}"

github awslabs / gluon-ts / test / distribution / test_distribution_inference.py
        rank=rank,
    )

    assert np.allclose(
        distr.variance[0].asnumpy(), Sigma, atol=0.1, rtol=0.1
    ), f"did not match: sigma = {Sigma}, sigma_hat = {distr.variance[0]}"

    samples = distr.sample(num_samples).squeeze().asnumpy()

    mu_hat, D_hat, W_hat = maximum_likelihood_estimate_sgd(
        LowrankMultivariateGaussianOutput(
            dim=dim, rank=rank, sigma_init=0.2, sigma_minimum=0.0
        ),
        samples,
        learning_rate=PositiveFloat(0.01),
        num_epochs=PositiveInt(25),
        init_biases=None,  # todo we would need to rework biases a bit to use it in the multivariate case
        hybridize=hybridize,
    )

    distr = LowrankMultivariateGaussian(
        dim=dim,
        rank=rank,
        mu=mx.nd.array([mu_hat]),
        D=mx.nd.array([D_hat]),
        W=mx.nd.array([W_hat]),
    )

    Sigma_hat = distr.variance.asnumpy()

    assert np.allclose(
        mu_hat, mu, atol=0.2, rtol=0.1
    )

github awslabs / gluon-ts / test / distribution / test_distribution_inference.py
def maximum_likelihood_estimate_sgd(
    distr_output: DistributionOutput,
    samples: mx.ndarray,
    init_biases: List[mx.ndarray.NDArray] = None,
    num_epochs: PositiveInt = PositiveInt(5),
    learning_rate: PositiveFloat = PositiveFloat(1e-2),
    hybridize: bool = True,
) -> Iterable[float]:
    model_ctx = mx.cpu()

    arg_proj = distr_output.get_args_proj()
    arg_proj.initialize()

    if hybridize:
        arg_proj.hybridize()

    if init_biases is not None:
        for param, bias in zip(arg_proj.proj, init_biases):
            param.params[param.prefix + "bias"].initialize(
                mx.initializer.Constant(bias), force_reinit=True
            )
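
In the signature above, the PositiveInt and PositiveFloat annotations document the expected argument types, but Python does not check annotations on an ordinary function call. If you want pydantic to enforce them, one general pydantic v1 option (shown here as a sketch, not something this test relies on) is the validate_arguments decorator:

from pydantic import PositiveFloat, PositiveInt, ValidationError, validate_arguments

@validate_arguments
def train(num_epochs: PositiveInt = 5, learning_rate: PositiveFloat = 1e-2) -> None:
    """Illustrative stand-in for a training routine."""

train(num_epochs=10)     # ok
try:
    train(num_epochs=0)  # rejected: 0 is not greater than 0
except ValidationError as err:
    print(err)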

github awslabs / gluon-ts / test / distribution / test_distribution_inference.py
    dim = 3

    alpha = np.array([1.0, 2.0, 3.0])

    distr = Dirichlet(alpha=mx.nd.array(alpha))
    cov = distr.variance.asnumpy()

    samples = distr.sample(num_samples)

    alpha_hat = maximum_likelihood_estimate_sgd(
        DirichletOutput(dim=dim),
        samples,
        init_biases=None,
        hybridize=hybridize,
        learning_rate=PositiveFloat(0.05),
        num_epochs=PositiveInt(10),
    )

    distr = Dirichlet(alpha=mx.nd.array(alpha_hat))

    cov_hat = distr.variance.asnumpy()

    assert np.allclose(
        alpha_hat, alpha, atol=0.1, rtol=0.1
    ), f"alpha did not match: alpha = {alpha}, alpha_hat = {alpha_hat}"
    assert np.allclose(
        cov_hat, cov, atol=0.1, rtol=0.1
    ), f"Covariance did not match: cov = {cov}, cov_hat = {cov_hat}"

github rafalp / Misago / misago / graphql / mutations / closethreads.py
async def create_input_model(context: GraphQLContext) -> CloseThreadsInputModel:
    return create_model(
        "CloseThreadsInputModel",
        threads=(bulkactionidslist(PositiveInt, context["settings"]), ...),
        is_closed=(bool, ...),
    )
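
Here create_model builds the input model dynamically so that the threads field can use a site-specific bulk limit. The sketch below shows roughly how such a model behaves once created, with pydantic v1's conlist standing in for Misago's bulkactionidslist helper; the limit of 50 is an invented placeholder.

from pydantic import PositiveInt, ValidationError, conlist, create_model

CloseThreadsInput = create_model(
    "CloseThreadsInput",
    threads=(conlist(PositiveInt, min_items=1, max_items=50), ...),
    is_closed=(bool, ...),
)

CloseThreadsInput(threads=[1, 2, 3], is_closed=True)  # ok
try:
    CloseThreadsInput(threads=[0], is_closed=True)    # rejected: ids must be > 0
except ValidationError as err:
    print(err)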

github Skyscanner / pycfmodel / pycfmodel / model / parameter.py
class Parameter(CustomModel):
    """
    CloudFormation Parameter object representation
    """

    NO_ECHO_NO_DEFAULT: ClassVar[str] = "NO_ECHO_NO_DEFAULT"
    NO_ECHO_WITH_DEFAULT: ClassVar[str] = "NO_ECHO_WITH_DEFAULT"
    NO_ECHO_WITH_VALUE: ClassVar[str] = "NO_ECHO_WITH_VALUE"
    AllowedPattern: Optional[str] = None
    AllowedValues: Optional[List] = None
    ConstraintDescription: Optional[str] = None
    Default: Optional[Any] = None
    Description: Optional[str] = None
    MaxLength: Optional[PositiveInt] = None
    MaxValue: Optional[PositiveInt] = None
    MinLength: Optional[int] = None
    MinValue: Optional[int] = None
    NoEcho: Optional[bool] = None
    Type: str

    def get_ref_value(self, provided_value=None) -> Optional[str]:
        """
        Calculates the parameter value to be used in the template.

        - If `NoEcho` property is set, it uses a constant value.
        - If it is a list of numbers or a comma delimited list, returns the string version of each element in a list.
        - Returns None if `provided_value` and `Default` are `None`.

        Arguments:
            provided_value: Value injected in the template
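
In this class, Optional[PositiveInt] means MaxLength and MaxValue may be omitted (defaulting to None), but any value that is supplied must be a positive integer. A reduced sketch of the same idea, with the field names borrowed from the class above and the rest of the model left out:

from typing import Optional
from pydantic import BaseModel, PositiveInt, ValidationError

class ParameterSketch(BaseModel):
    Type: str
    MaxLength: Optional[PositiveInt] = None
    MaxValue: Optional[PositiveInt] = None

ParameterSketch(Type="String")                   # optional fields stay None
ParameterSketch(Type="String", MaxLength=64)     # ok
try:
    ParameterSketch(Type="Number", MaxValue=-1)  # rejected: -1 is not > 0
except ValidationError as err:
    print(err)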

github rafalp / Misago / misago / graphql / mutations / editpost.py
async def create_input_model(context: GraphQLContext) -> EditPostInputModel:
    return create_model(
        "EditPostInputModel",
        post=(PositiveInt, ...),
        body=(constr(strip_whitespace=True), ...),
    )

github rafalp / Misago / misago / graphql / mutations / closethread.py
async def create_input_model(context: GraphQLContext) -> CloseThreadInputModel:
    return create_model(
        "CloseThreadInputModel", thread=(PositiveInt, ...), is_closed=(bool, ...),
    )

github seandstewart / typical / benchmark / models / pyd.py
    qual_level_ranking: float = 0


class Model(BaseModel):
    class Config(BaseConfig):
        validate_all = True
        validate_assignment = True
        orm_mode = True

    id: int
    client_name: DBString
    sort_index: float
    client_phone: Optional[DBString] = None
    grecaptcha_response: Optional[GReCaptchaResponse] = None
    location: Optional[Location] = None
    contractor: Optional[PositiveInt] = None
    upstream_http_referrer: Optional[HTTPReferer] = None
    last_updated: Optional[datetime] = None
    skills: List[Skill] = []


def validate(data):
    try:
        return True, Model(**data)
    except ValidationError as err:
        return False, err


def deserialize(data):
    return validate(data)
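
Here contractor: Optional[PositiveInt] lets the benchmark accept records without a contractor id while still rejecting non-positive ids, and validate wraps the ValidationError into a (success, result) tuple. A simplified sketch of that pattern, leaving out the project-specific types such as DBString; RecordSketch and validate_record are illustrative names only:

from typing import Optional, Tuple, Union
from pydantic import BaseModel, PositiveInt, ValidationError

class RecordSketch(BaseModel):
    id: int
    contractor: Optional[PositiveInt] = None

def validate_record(data: dict) -> Tuple[bool, Union[RecordSketch, ValidationError]]:
    try:
        return True, RecordSketch(**data)
    except ValidationError as err:
        return False, err

print(validate_record({"id": 1, "contractor": 7}))   # (True, RecordSketch(...))
print(validate_record({"id": 2, "contractor": -1}))  # (False, ValidationError(...))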

github rafalp / Misago / misago / graphql / mutations / movethreads.py
async def create_input_model(context: GraphQLContext) -> MoveThreadsInputModel:
    return create_model(
        "MoveThreadsInputModel",
        threads=(bulkactionidslist(PositiveInt, context["settings"]), ...),
        category=(PositiveInt, ...),
    )