How to use the rally.task.validation.add function in rally

To help you get started, we've selected a few rally examples that illustrate popular ways this function is used in public projects.

Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.

github openstack / rally-openstack / rally_openstack / scenarios / ceilometer / events.py View on Github external
kbasic.KeystoneBasic):

    def run(self):
        """Create a user, then list every Ceilometer event type.

        A user is created first so that at least one event exists; the
        scenario then retrieves all event types via GET /v2/event_types
        and asserts that the returned list is non-empty.
        """
        self.admin_keystone.create_user()
        types_found = self._list_event_types()
        self.assertTrue(
            types_found,
            "Event types list is empty, but it should include at least one"
            " type about user creation")


@validation.add("required_services", services=[consts.Service.CEILOMETER,
                                               consts.Service.KEYSTONE])
@validation.add("required_platform", platform="openstack", admin=True)
@scenario.configure(context={"admin_cleanup@openstack": ["keystone"],
                             "cleanup@openstack": ["ceilometer"]},
                    name="CeilometerEvents.create_user_and_get_event",
                    platform="openstack")
class CeilometerEventsCreateUserAndGetEvent(cutils.CeilometerScenario,
                                            kbasic.KeystoneBasic):

    def run(self):
        """Create user and gets event.

        This scenario creates user to store new event and
        fetches one event using GET /v2/events/.
        """
        self.admin_keystone.create_user()
github cloud-bulldozer / browbeat / rally / rally-plugins / workloads / sysbench.py View on Github external
import logging

from rally_openstack.scenarios.neutron import utils as neutron_utils
from rally_openstack.scenarios.vm import utils as vm_utils
from rally.common import sshutils
from rally.task import scenario
from rally.task import types
from rally.task import validation
from rally_openstack import consts

LOG = logging.getLogger(__name__)

# Resolve human-readable image/flavor names to their OpenStack IDs.
@types.convert(image={"type": "glance_image"}, flavor={"type": "nova_flavor"})
@validation.add("image_valid_on_flavor", flavor_param="flavor", image_param="image")
@validation.add("required_services", services=[consts.Service.NEUTRON, consts.Service.NOVA])
@validation.add("required_platform", platform="openstack", users=True)
@scenario.configure(context={"cleanup@openstack": ["neutron", "nova"],
                             "keypair@openstack": {}, "allow_ssh@openstack": None},
                    name="BrowbeatPlugin.sysbench", platform="openstack")
class BrowbeatSysbench(vm_utils.VMScenario,
                       neutron_utils.NeutronScenario):
    def build_host(self, external, image, flavor, user, password=None, **kwargs):
        # Boot a server with a floating IP on the given external network and
        # wait until it responds to ping.
        #
        # ``external`` is expected to be a network dict with a 'name' key;
        # ``user``/``password`` are not used in the visible portion —
        # presumably consumed by SSH setup further down (truncated here).
        keyname = self.context["user"]["keypair"]["name"]
        host, host_ip = self._boot_server_with_fip(image,
                                                   flavor,
                                                   use_floating_ip=True,
                                                   floating_network=external[
                                                       'name'],
                                                   key_name=keyname,
                                                   **kwargs)
        # Wait for ping
        self._wait_for_ping(host_ip['ip'])
        # NOTE(review): no return statement is visible; the method body
        # appears truncated in this excerpt — confirm against upstream.
github openstack / rally-openstack / rally_openstack / scenarios / cinder / volume_backups.py View on Github external
from rally_openstack import consts
from rally_openstack import scenario
from rally_openstack.scenarios.cinder import utils as cinder_utils


"""Scenarios for Cinder Volume Backup."""


@validation.add("number", param_name="size", minval=1, integer_only=True)
@validation.add("restricted_parameters", param_names=["name", "display_name"],
                subdict="create_volume_kwargs")
@validation.add("restricted_parameters", param_names="name",
                subdict="create_backup_kwargs")
@validation.add("required_services", services=[consts.Service.CINDER])
@validation.add("required_cinder_services", services="cinder-backup")
@validation.add("required_platform", platform="openstack", users=True)
@scenario.configure(
    context={"cleanup@openstack": ["cinder"]},
    name="CinderVolumeBackups.create_incremental_volume_backup",
    platform="openstack")
class CreateIncrementalVolumeBackup(cinder_utils.CinderBasic):
    def run(self, size, do_delete=True, create_volume_kwargs=None,
            create_backup_kwargs=None):
        """Create a incremental volume backup.

        The scenario first create a volume, the create a backup, the backup
        is full backup. Because Incremental backup must be based on the
        full backup. finally create a incremental backup.

        :param size: volume size in GB
        :param do_delete: deletes backup and volume after creating if True
github openstack / rally-openstack / rally_openstack / scenarios / cinder / volume_types.py View on Github external
specs = {
                "provider": provider,
                "cipher": cipher,
                "key_size": key_size,
                "control_location": control_location
            }
        else:
            LOG.warning("The argument `create_spec` is deprecated since"
                        " Rally 0.10.0. Specify all arguments from it"
                        " explicitly.")
            specs = create_specs
        self.admin_cinder.create_encryption_type(volume_type,
                                                 specs=specs)


@validation.add("required_params", params=[("create_specs", "provider")])
@validation.add("required_services", services=[consts.Service.CINDER])
@validation.add("required_platform", platform="openstack", admin=True)
@scenario.configure(
    context={"admin_cleanup@openstack": ["cinder"]},
    name="CinderVolumeTypes.create_and_list_encryption_type",
    platform="openstack")
class CreateAndListEncryptionType(cinder_utils.CinderBasic):

    def run(self, create_specs=None, provider=None, cipher=None,
            key_size=None, control_location="front-end", search_opts=None):
        """Create and list encryption type

        This scenario firstly creates a volume type, secondly creates an
        encryption type for the volume type, thirdly lists all encryption
        types.
github openstack / rally-openstack / rally_openstack / scenarios / nova / aggregates.py View on Github external
class CreateAndGetAggregateDetails(utils.NovaScenario):
    """Create a nova aggregate and read its details back."""

    def run(self, availability_zone):
        """Create an aggregate, then fetch the details of that aggregate.

        :param availability_zone: The availability zone of the aggregate
        """
        created = self._create_aggregate(availability_zone)
        self._get_aggregate_details(created)


@types.convert(image={"type": "glance_image"})
@validation.add("required_services", services=[consts.Service.NOVA])
@validation.add("required_platform", platform="openstack",
                admin=True, users=True)
@scenario.configure(
    context={"admin_cleanup@openstack": ["nova"],
             "cleanup@openstack": ["nova"]},
    name="NovaAggregates.create_aggregate_add_host_and_boot_server",
    platform="openstack")
class CreateAggregateAddHostAndBootServer(utils.NovaScenario):
    """Scenario to verify an aggregate."""

    def run(self, image, metadata, availability_zone=None, ram=512, vcpus=1,
            disk=1, boot_server_kwargs=None):
        """Scenario to create and verify an aggregate

        This scenario creates an aggregate, adds a compute host and metadata
        to the aggregate, adds the same metadata to the flavor and creates an
        instance. Verifies that instance host is one of the hosts in the
github openstack / rally-openstack / rally_openstack / scenarios / grafana / metrics.py View on Github external
from rally.task import validation

from rally_openstack import consts
from rally_openstack import scenario
from rally_openstack.services.grafana import grafana as grafana_service

CONF = cfg.CONF
LOG = logging.getLogger(__name__)

"""Scenarios for Pushgateway and Grafana metrics."""


@types.convert(image={"type": "glance_image"},
               flavor={"type": "nova_flavor"})
@validation.add("required_services", services=[consts.Service.NOVA])
@validation.add("required_platform", platform="openstack", admin=True)
@scenario.configure(context={"cleanup@openstack": ["nova"]},
                    name="GrafanaMetrics.push_metric_from_instance",
                    platform="openstack")
class PushMetricsInstance(scenario.OpenStackScenario):
    """Test monitoring system by pushing metric from nova server and check it.

    Scenario tests monitoring system, which uses Pushgateway as metric exporter
    and Grafana as metrics monitoring.

    The goal of the test is to check that monitoring system works correctly
    with nova instance. Test case is the following: we deploy some env with
    nodes on Openstack nova instances, add metric exporter (using Pushgateway
    in this test) inside nodes (i.e. nova instances) for some interested
    metrics (e.g. CPU, memory etc.). We want to check that metrics successfully
    sends to metrics storage (e.g. Prometheus) by requesting Grafana. Create
    nova instance, add Pushgateway push random metric to userdata and after
github openstack / rally-openstack / rally_openstack / scenarios / mistral / executions.py View on Github external
:param sort_keys: id,description
        :param sort_dirs: [SORT_DIRS] Comma-separated list of sort directions.
                          Default: asc.
        """
        self._list_executions(marker=marker, limit=limit,
                              sort_keys=sort_keys, sort_dirs=sort_dirs)


@types.convert(definition={"type": "file"})
@types.convert(params={"type": "file"})
@types.convert(wf_input={"type": "file"})
@validation.add("file_exists", param_name="definition")
@validation.add("required_platform", platform="openstack", users=True)
@validation.add("required_services",
                services=[consts.Service.MISTRAL])
@validation.add("workbook_contains_workflow",
                workbook_param="definition",
                workflow_param="workflow_name")
@scenario.configure(name="MistralExecutions.create_execution_from_workbook",
                    context={"cleanup@openstack": ["mistral"]},
                    platform="openstack")
class CreateExecutionFromWorkbook(utils.MistralScenario):

    def run(self, definition, workflow_name=None, wf_input=None, params=None,
            do_delete=False):
        """Scenario tests execution creation and deletion.

        This scenario is a very useful tool to measure the
        "mistral execution-create" and "mistral execution-delete"
        commands performance.
        :param definition: string (yaml string) representation of given file
                           content (Mistral workbook definition)
github openstack / rally-openstack / rally_openstack / scenarios / gnocchi / metric.py View on Github external
class CreateMetric(gnocchiutils.GnocchiBase):

    def run(self, archive_policy_name="low", resource_id=None, unit=None):
        """Create a single Gnocchi metric with a random name.

        :param archive_policy_name: Archive policy name
        :param resource_id: The resource ID to attach the metric to
        :param unit: The unit of the metric
        """
        metric_name = self.generate_random_name()
        self.gnocchi.create_metric(
            metric_name,
            archive_policy_name=archive_policy_name,
            resource_id=resource_id,
            unit=unit)


@validation.add("required_services", services=[consts.Service.GNOCCHI])
@validation.add("required_platform", platform="openstack", users=True)
@scenario.configure(context={"cleanup@openstack": ["gnocchi.metric"]},
                    name="GnocchiMetric.create_delete_metric")
class CreateDeleteMetric(gnocchiutils.GnocchiBase):

    def run(self, archive_policy_name="low", resource_id=None, unit=None):
        """Create metric and then delete it.

        :param archive_policy_name: Archive policy name
        :param resource_id: The resource ID to attach the metric to
        :param unit: The unit of the metric
        """
        name = self.generate_random_name()
        # The created metric handle is kept for the delete step described in
        # the docstring.
        metric = self.gnocchi.create_metric(
            name, archive_policy_name=archive_policy_name,
            resource_id=resource_id, unit=unit)
        # NOTE(review): ``metric`` is never used in the visible portion — the
        # delete call appears truncated in this excerpt; confirm upstream.
github openstack / rally-openstack / rally_openstack / scenarios / heat / stacks.py View on Github external
:param environment: stack environment definition
        """

        stack = self._create_stack(template_path, parameters,
                                   files, environment)
        self._check_stack(stack)
        self._delete_stack(stack)


@types.convert(template_path={"type": "file"},
               updated_template_path={"type": "file"},
               files={"type": "file_dict"},
               updated_files={"type": "file_dict"})
@validation.add("required_services", services=[consts.Service.HEAT])
@validation.add("validate_heat_template", params="template_path")
@validation.add("required_platform", platform="openstack", users=True)
@scenario.configure(context={"cleanup@openstack": ["heat"]},
                    name="HeatStacks.create_update_delete_stack",
                    platform="openstack")
class CreateUpdateDeleteStack(utils.HeatScenario):

    def run(self, template_path, updated_template_path,
            parameters=None, updated_parameters=None,
            files=None, updated_files=None,
            environment=None, updated_environment=None):
        """Create, update and then delete a stack.

        Measure the "heat stack-create", "heat stack-update"
        and "heat stack-delete" commands performance.

        :param template_path: path to stack template file
        :param updated_template_path: path to updated stack template file
github openstack / rally-openstack / rally_openstack / scenarios / murano / environments.py View on Github external
@validation.add("required_services", services=[consts.Service.MURANO])
@scenario.configure(context={"cleanup@openstack": ["murano.environments"]},
                    name="MuranoEnvironments.create_and_delete_environment",
                    platform="openstack")
class CreateAndDeleteEnvironment(utils.MuranoScenario):

    def run(self):
        """Create a Murano environment plus a session, then delete it."""
        env = self._create_environment()
        self._create_session(env.id)
        self._delete_environment(env)


@validation.add("required_services", services=[consts.Service.MURANO])
@validation.add("required_contexts", contexts=("murano_packages"))
@scenario.configure(context={"cleanup@openstack": ["murano"],
                             "roles@openstack": ["admin"]},
                    name="MuranoEnvironments.create_and_deploy_environment",
                    platform="openstack")
class CreateAndDeployEnvironment(utils.MuranoScenario):

    def run(self, packages_per_env=1):
        """Create environment, session and deploy environment.

        Create environment, create session, add app to environment
        packages_per_env times, send environment to deploy.

        :param packages_per_env: number of packages per environment
        """
        environment = self._create_environment()
        session = self._create_session(environment.id)