How to use the syft.generic.frameworks.hook.hook_args.hook_response function in syft

To help you get started, we've selected a few syft examples based on popular ways this function is used in public projects.

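All of the examples below follow the same three-step pattern when a custom Syft tensor overrides a method: strip the Syft wrappers with hook_args.unwrap_args_from_method, run the underlying operation on the unwrapped tensors, then re-wrap the result with hook_args.hook_response so the caller gets back the original tensor type. Here is a minimal sketch of that pattern; the class name MyTensor, its constructor, and the assumption that the class is already registered with the Syft hook are illustrative only and not taken from the examples.

from syft.generic.frameworks.hook import hook_args


class MyTensor:
    """Hypothetical custom Syft tensor (assumed to be registered with the hook)."""

    def __init__(self, child=None):
        self.child = child  # the wrapped native or Syft tensor

    def get_class_attributes(self):
        # Extra attributes forwarded to the re-wrapped result via wrap_args (none here)
        return {}

    def __add__(self, *args, **kwargs):
        # 1. Replace self and any Syft tensors in args with their .child attribute
        new_self, new_args, new_kwargs = hook_args.unwrap_args_from_method(
            "__add__", self, args, kwargs
        )

        # 2. Run the native operation on the unwrapped tensors
        response = getattr(new_self, "__add__")(*new_args, **new_kwargs)

        # 3. Re-wrap the result so the caller gets a MyTensor back
        response = hook_args.hook_response(
            "__add__", response, wrap_type=type(self), wrap_args=self.get_class_attributes()
        )
        return response

hook_response takes the command name, the raw result, the tensor type to wrap it in (wrap_type), and optionally wrap_args, a dict of extra attributes used when building the wrapper (the snippets pass self.get_class_attributes()), as in the AdditiveSharingTensor and MultiPointerTensor examples below.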

Example from OpenMined/PySyft, syft/frameworks/torch/tensors/interpreters/paillier.py (view on GitHub):
            obj.child = data
            return obj

        if isinstance(self.child, th.Tensor):
            self.child = self.child.numpy()

        # Replace all syft tensors with their child attribute
        new_self, new_args, new_kwargs = hook_args.unwrap_args_from_method(
            "__mul__", self, args, kwargs
        )

        # Send it to the appropriate class and get the response
        response = getattr(new_self, "__mul__")(*new_args, **new_kwargs)

        # Put back SyftTensor on the tensors found in the response
        response = hook_args.hook_response("__mul__", response, wrap_type=type(self))
        return response

Example from OpenMined/PySyft, syft/frameworks/torch/hook/hook.py (view on GitHub):
        def overloaded_attr(self, *args, **kwargs):
            """
            Perform the hooking operation
            """

            # Replace all syft tensors with their child attribute
            new_self, new_args, new_kwargs = hook_args.unwrap_args_from_method(
                attr, self, args, kwargs
            )

            results = {}
            for k, v in new_self.items():
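                # dispatch(new_args, k) selects the share of each argument that belongs to worker k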
                results[k] = v.__getattribute__(attr)(*dispatch(new_args, k), **new_kwargs)

            # Put back AdditiveSharingTensor on the tensors found in the response
            response = hook_args.hook_response(
                attr,
                results,
                wrap_type=AdditiveSharingTensor,
                wrap_args=self.get_class_attributes(),
            )

            return response

Example from OpenMined/PySyft, syft/generic/frameworks/hook/hook.py (view on GitHub):
        def overloaded_attr(self, *args, **kwargs):
            """
            Perform the hooking operation
            """

            # Replace all syft tensors with their child attribute
            new_self, new_args, new_kwargs = hook_args.unwrap_args_from_method(
                attr, self, args, kwargs
            )

            results = {}
            for k, v in new_self.items():
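                # here new_self maps each location to a pointer, and dispatch picks the matching args for that location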
                results[k] = v.__getattribute__(attr)(*dispatch(new_args, k), **new_kwargs)

            # Put back MultiPointerTensor on the tensors found in the response
            response = hook_args.hook_response(
                attr, results, wrap_type=MultiPointerTensor, wrap_args=self.get_class_attributes()
            )

            return response

Example from OpenMined/PySyft, syft/frameworks/torch/tensors/decorators/sensitivity.py (view on GitHub):
    def sqrt(self, *args, **kwargs):
        # Replace all syft tensors with their child attribute
        new_self, new_args, new_kwargs = hook_args.unwrap_args_from_method(
            "sqrt", self, args, kwargs
        )

        # Send it to the appropriate class and get the response
        response = getattr(new_self, "sqrt")(*new_args, **new_kwargs)

        # Put back SyftTensor on the tensors found in the response
        response = hook_args.hook_response("sqrt", response, wrap_type=type(self))

        l = self.l.sqrt(*args)
        h = self.h.sqrt(*args)
        el = self.el.sqrt(*args)
        eh = self.eh.sqrt(*args)

        response.l = l
        response.h = h
        response.el = el
        response.eh = eh
        response.accountant = self.accountant

        return response

Example from OpenMined/PySyft, syft/generic/frameworks/hook/hook.py (view on GitHub):
        def overloaded_syft_method(self, *args, **kwargs):
            """
            Perform the hooking operation
            """
            # Replace all syft tensors with their child attribute
            new_self, new_args, new_kwargs = hook_args.unwrap_args_from_method(
                attr, self, args, kwargs
            )

            # Send it to the appropriate class and get the response
            response = getattr(new_self, attr)(*new_args, **new_kwargs)

            # Put back SyftTensor on the tensors found in the response
            response = hook_args.hook_response(
                attr, response, wrap_type=type(self), wrap_args=self.get_class_attributes()
            )

            return response

Example from OpenMined/PySyft, syft/frameworks/torch/tensors/interpreters/precision.py (view on GitHub):
            new_other = neg_other + pos_other

            # If both have the same sign, sgn is 1 else it's 0
            # To be able to write sgn = 1 - (sgn_self - sgn_other) ** 2,
            # we would need to overload the __add__ for operators int and AST.
            sgn = -((sgn_self - sgn_other) ** 2) + 1
            changed_sign = True

            if cmd == "div":
                new_self *= self.base ** self.precision_fractional

        # Send it to the appropriate class and get the response
        response = getattr(new_self, cmd)(new_other)

        # Put back SyftTensor on the tensors found in the response
        response = hook_args.hook_response(
            cmd, response, wrap_type=type(self), wrap_args=self.get_class_attributes()
        )

        if not isinstance(other, (int, torch.Tensor, AdditiveSharingTensor)):
            if cmd == "mul":
                # If operation is mul, we need to truncate
                response = response.truncate(self.precision_fractional, check_sign=False)

            response %= self.field  # Wrap around the field

            if changed_sign:
                # Give back its sign to response
                pos_res = response * sgn
                neg_res = response * (sgn - 1)
                response = neg_res + pos_res

Example from OpenMined/PySyft, syft/generic/pointers/pointer_plan.py (view on GitHub):
                self._locations,
            )

        # look for the relevant id in the list of ids
        id_at_location = None
        for loc, id_at_loc in zip(self._locations, self._ids_at_location):
            if loc == location:
                id_at_location = id_at_loc
                break

        command = ("run", id_at_location, args, kwargs)

        response = self.owner.send_command(
            message=command, recipient=location, return_ids=response_ids
        )
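        # Re-wrap the raw result from the remote worker as a framework tensor (FrameworkTensor[0] is typically torch.Tensor)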
        response = hook_args.hook_response(plan_name, response, wrap_type=FrameworkTensor[0])
        response.garbage_collect_data = False
        return response

Example from OpenMined/PySyft, syft/frameworks/torch/tensors/decorators/sensitivity.py (view on GitHub):
    def __add__(self, *args, **kwargs):
        """
        Here is the version of the add method without the decorator: as you can see,
        it is much more complicated. However, you may sometimes need to specify
        particular behaviour, so here is something to start from :)
        """
        # Replace all syft tensors with their child attribute
        new_self, new_args, new_kwargs = hook_args.unwrap_args_from_method(
            "__add__", self, args, kwargs
        )

        # Send it to the appropriate class and get the response
        response = getattr(new_self, "__add__")(*new_args, **new_kwargs)

        # Put back SyftTensor on the tensors found in the response
        response = hook_args.hook_response("__add__", response, wrap_type=type(self))

        other = args[0]

        if isinstance(other, SensitivityTensor):

            l = self.l + other.l
            h = self.h + other.h
            el = self.el + other.el
            eh = self.eh + other.eh

        else:

            l = self.l + other
            h = self.h + other
            el = self.el
            eh = self.eh