How to use the channels.layers.get_channel_layer function in channels

To help you get started, we've selected a few examples that show popular ways channels is used in public projects.

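Before diving into the examples, here is a minimal sketch of the core pattern they all share (the function, group, and message names below are illustrative, not taken from any of the projects): get_channel_layer() returns the project's default channel layer, and async_to_sync wraps its async group_send method so it can be called from synchronous code.

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer


def notify_group(group_name, text):
    channel_layer = get_channel_layer()  # default layer configured in CHANNEL_LAYERS
    async_to_sync(channel_layer.group_send)(
        group_name,
        # "type" selects the consumer handler; dots map to underscores, so
        # "chat.message" is handled by a chat_message() method
        {"type": "chat.message", "message": text},
    )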

github and-sm / testgr / loader / signals.py (View on GitHub)
# Statistics
        if job_object.stop_time is not None:
            result['stop_time'] = timezone.localtime(job_object.stop_time).strftime('%d-%b-%Y, %H:%M:%S')
        else:
            result['stop_time'] = None
        if job_object.time_taken is not None:
            result['time_taken'] = job_object.get_time_taken()
        else:
            result['time_taken'] = None

        result['status'] = str(job_object.status)

        return result

    if created:
        channel_layer = get_channel_layer()
        async_to_sync(channel_layer.group_send)(
            "job_details" + "-" + instance.uuid,
            {
                "type": "message",
                "message": data()
            }
        )

    if instance:
        channel_layer = get_channel_layer()
        async_to_sync(channel_layer.group_send)(
            "job_details" + "-" + instance.uuid,
            {
                "type": "message",
                "message": data()
            }
        )
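
The payloads above use "type": "message", which the channel layer routes to a consumer method of the same name. Below is a hedged sketch of a matching consumer, assuming a URL route that captures the job uuid; this consumer is illustrative and not part of testgr.

from channels.generic.websocket import AsyncJsonWebsocketConsumer


class JobDetailsConsumer(AsyncJsonWebsocketConsumer):
    async def connect(self):
        # mirror the "job_details-<uuid>" group name used by the signal above
        uuid = self.scope["url_route"]["kwargs"]["uuid"]
        self.group_name = "job_details-" + uuid
        await self.channel_layer.group_add(self.group_name, self.channel_name)
        await self.accept()

    async def disconnect(self, code):
        await self.channel_layer.group_discard(self.group_name, self.channel_name)

    async def message(self, event):
        # called for every group_send with {"type": "message", ...}
        await self.send_json(event["message"])
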
github ShipChain / transmission / tests / profiles-enabled / async / test_async.py (View on GitHub)
shipment, _ = await sync_to_async(Shipment.objects.get_or_create)(
        id=random_id(),
        owner_id=USER_ID,
        storage_credentials_id=random_id(),
        shipper_wallet_id=random_id(),
        carrier_wallet_id=random_id(),
        contract_version='1.0.0'
    )

    # Update shipment (should get a message)
    shipment.carriers_scac = 'TESTING123'
    await sync_to_async(shipment.save)()
    # Have to manually send message to channel

    channel_layer = get_channel_layer()
    await channel_layer.group_send(shipment.owner_id, {
        "type": "shipments.update",
        "shipment_id": shipment.id
    })

    # Re-enable Shipment post-save signal
    await sync_to_async(models.signals.post_save.connect)(shipment_post_save, sender=Shipment,
                                                          dispatch_uid='shipment_post_save')
    response = await communicator.receive_json_from()

    assert response['event'] == EventTypes.shipment_update.name
    assert response['data']['data']['type'] == 'Shipment'
    assert response['data']['data']['attributes']['carriers_scac'] == shipment.carriers_scac

    await communicator.disconnect()
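
Because this test already runs in an async context, group_send is awaited directly; async_to_sync is only needed when calling the layer from synchronous code. The communicator above is presumably a channels.testing.WebsocketCommunicator; a hedged sketch of how such a test might open one (the application and path are placeholders):

from channels.testing import WebsocketCommunicator


async def open_communicator(application, path="/ws/shipments/"):
    communicator = WebsocketCommunicator(application, path)
    connected, _ = await communicator.connect()
    assert connected
    return communicator
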
github foonsun / fos_quant_platform / blockapps / duiqiao / tasks.py (View on GitHub)
def run_duiqiao_policy(policy_id):
    print(policy_id)
    channel_layer = get_channel_layer()
    while True:
        try:
            policy = DuiQiaoPolicy.objects.get(id=policy_id)
        except:
            return
        print(policy)
        username = policy.user.username
        exchange = policy.exchange
        symbol = policy.symbol
        accesskey = policy.accesskey
        secretkey = policy.secretkey
        max_buy_price = policy.max_buy_price
        min_sell_price = policy.min_sell_price
        base_volume = policy.base_volume
        start_time = policy.start_time
        end_time = policy.end_time
github MrNaif2018 / bitcart / gui / tasks.py (View on GitHub)
from django.conf import settings
from django.utils import timezone
from asgiref.sync import async_to_sync
import dramatiq
from . import models
from channels.layers import get_channel_layer
from bitcart.coins.btc import BTC
import time
import traceback


RPC_URL = settings.RPC_URL
RPC_USER = settings.RPC_USER
RPC_PASS = settings.RPC_PASS
MAX_RETRIES = 3

channel_layer = get_channel_layer()


@dramatiq.actor(max_retries=MAX_RETRIES)
def poll_updates(invoice_id):
    obj = models.Invoice.objects.get(id=invoice_id)
    address = obj.bitcoin_address
    if not address:
        return
    btc_instance = BTC(RPC_URL, xpub=obj.products.all()[0].store.wallet.xpub,
                       rpc_user=RPC_USER, rpc_pass=RPC_PASS)
    while True:
        invoice_data = btc_instance.getrequest(address)
        if invoice_data["status"] != "Pending":
            if invoice_data["status"] == "Unknown":
                obj.status = "invalid"
            if invoice_data["status"] == "Expired":
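
Several of these excerpts, including the one above, call get_channel_layer() once at module import time. That only works when CHANNEL_LAYERS is configured; with no configuration get_channel_layer() returns None. A typical Redis-backed configuration (host and port are placeholders) looks like this in settings.py:

CHANNEL_LAYERS = {
    "default": {
        "BACKEND": "channels_redis.core.RedisChannelLayer",
        "CONFIG": {"hosts": [("localhost", 6379)]},
    },
}
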
github ShipChain / transmission / apps / routes / signals / tracking.py (View on GitHub)
import logging

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer
from django.contrib.gis.geos import Point
from django.db.models.signals import pre_save, post_save
from django.dispatch import receiver
from django.urls import reverse
from fancy_cache.memory import find_urls

from apps.routes.models import RouteTrackingData
from apps.shipments.models import TransitState

LOG = logging.getLogger('transmission')

channel_layer = get_channel_layer()  # pylint:disable=invalid-name


@receiver(pre_save, sender=RouteTrackingData, dispatch_uid='routetrackingdata_pre_save')
def routetrackingdata_pre_save(sender, **kwargs):
    instance = kwargs["instance"]
    instance.point = Point(instance.longitude, instance.latitude)


@receiver(post_save, sender=RouteTrackingData, dispatch_uid='routetrackingdata_post_save')
def routetrackingdata_post_save(sender, **kwargs):
    instance = kwargs["instance"]
    LOG.debug(f'New tracking_data committed to db and will be pushed to the UI. Tracking_data: {instance.id}.')

    # Invalidate cached tracking data view for each shipment in Route
    for leg in instance.route.routeleg_set.filter(shipment__state=TransitState.IN_TRANSIT.value):
        tracking_get_url = reverse('shipment-tracking', kwargs={'version': 'v1', 'pk': leg.shipment.id})
github JBEI / edd / server / edd / load / notify / backend.py (View on GitHub)
import logging

from asgiref.sync import async_to_sync, sync_to_async
from channels.layers import get_channel_layer

from edd.notify.backend import Notification
from edd.utilities import JSONEncoder

channel_layer = get_channel_layer()
logger = logging.getLogger(__name__)


class WsBroker:
    """
    A stateless broker that streams transient notifications directly to its
    groups without any storage.
    """

    def __init__(self, user, *args, **kwargs):
        self.user = user

    def notify(self, message, tags=None, payload=None, uuid=None):
        logger.debug(
            f'Notify: "{message}", tags: {tags}, uuid={uuid}, payload={payload}'
        )
github jaydenwindle / graphene-subscriptions / graphene_subscriptions / events.py (View on GitHub)
def send(self):
        channel_layer = get_channel_layer()
        async_to_sync(channel_layer.group_send)(
            "subscriptions", {"type": "signal.fired", "event": self.to_dict()}
        )
github inoks / django-chatbot / app / tasks.py (View on GitHub)
import time
from celery import shared_task
from channels.layers import get_channel_layer
from asgiref.sync import async_to_sync

channel_layer = get_channel_layer()

@shared_task
def add(x, y):
    async_to_sync(channel_layer.group_send)("calc", {"type": "calc.message", "message": '{} + {} = {}'.format(x, y, x + y)})
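
A hedged usage note: queueing the task from anywhere in the project has the Celery worker push the result to every consumer subscribed to the "calc" group.

add.delay(2, 3)  # the worker broadcasts "2 + 3 = 5" to the "calc" group
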
github hishnash / djangochannelsrestframework / djangochannelsrestframework / observer / observer.py (View on GitHub)
def handle(self, signal, *args, **kwargs):
        message = self.serialize(signal, *args, **kwargs)
        channel_layer = get_channel_layer()
        for group_name in self.group_names_for_signal(*args, message=message, **kwargs):
            async_to_sync(channel_layer.group_send)(group_name, message)
github Bartzi / LabShare / labshare / utils.py (View on GitHub)
def publish_device_state(device, channel_name=None):
    channel_layer = channels.layers.get_channel_layer()
    name = device.name
    device_data = device.serialize()
    if channel_name is None:
        send_function = async_to_sync(channel_layer.group_send)
    else:
        send_function = async_to_sync(channel_layer.send)
    send_function(channel_name if channel_name else name, {'type': 'update_info', 'message': json.dumps(device_data)})
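
This last excerpt shows the difference between the two send paths: group_send fans a message out to every channel that joined a group, while send targets a single channel name (typically a consumer's self.channel_name captured at connect time). A minimal sketch with illustrative names:

from asgiref.sync import async_to_sync
from channels.layers import get_channel_layer

channel_layer = get_channel_layer()
payload = {"type": "update_info", "message": "{}"}

# broadcast to every consumer that joined the "gpu-lab" group
async_to_sync(channel_layer.group_send)("gpu-lab", payload)

# deliver to exactly one consumer via its channel name
async_to_sync(channel_layer.send)("some.channel.name", payload)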