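# The fragments below collect json.JSONDecodeError handling patterns from what
# appears to be a blockchain message-sync codebase. Several enclosing function
# signatures are truncated in the source and are kept as-is; standard imports
# (json, asyncio, random, dateutil) live in the enclosing modules.

# --- Handle transactions from a NULS-style explorer feed: hex-decode the
# --- payload, parse it as JSON, and checkpoint the last processed block height.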
        target_addr, start_height, remark=remark):
    ldata = tx['txDataHex']
    LOGGER.info('Handling TX in block %s' % tx['height'])
    try:
        ddata = bytes.fromhex(ldata).decode('utf-8')
        last_height = tx['height']
        jdata = json.loads(ddata)
        context = {"chain_name": CHAIN_NAME,
                   "tx_hash": tx['hash'],
                   "height": tx['height'],
                   "time": tx['createTime'],
                   "publisher": tx["coinFroms"][0]['address']}
        yield (jdata, context)
    except json.JSONDecodeError:
        # if it's not valid json, just ignore it...
        LOGGER.info("Incoming logic data is not JSON, ignoring. %r"
                    % ldata)
    if last_height:
        await Chain.set_last_height(CHAIN_NAME, last_height)
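# --- Fetch a document from IPFS and parse it off the event loop; -1 acts as a
# --- "bogus data, never retry" sentinel.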
async def get_json(hash, timeout=1, tries=1):
    loop = asyncio.get_event_loop()
    result = await get_ipfs_content(hash, timeout=timeout, tries=tries)
    if result is not None and result != -1:
        try:
            # Parse in an executor thread so a large payload doesn't block
            # the event loop.
            result = await loop.run_in_executor(None, json.loads, result)
        except json.JSONDecodeError:
            LOGGER.exception("Can't decode JSON")
            result = -1  # never retry, bogus data
    return result
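# --- Sanity-check a message arriving over IPFS pubsub before processing it.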
async def incoming_check(ipfs_pubsub_message):
    """ Verifies that an incoming message is sane, protecting against spam
    in the meantime.
    TODO: actually implement this; no check is done here yet. IMPORTANT.
    """
    try:
        message = json.loads(ipfs_pubsub_message.get('data', ''))
        LOGGER.debug("New message! %r" % message)
        message = await check_message(message, from_network=True)
        return message
    except json.JSONDecodeError:
        LOGGER.exception('Received non-json message %r'
                         % ipfs_pubsub_message.get('data', ''))
        return None
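# --- Same pattern for a Binance-Chain-style feed: the JSON payload travels in
# --- the transaction memo, and the checkpoint is a timestamp instead of a
# --- block height. (Enclosing signature truncated in the source.)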
        target_addr, start_time):
    ldata = tx['memo']
    LOGGER.info('Handling TX in block %s' % tx['blockHeight'])
    try:
        # Parse the timestamp once; keep the datetime for checkpointing and
        # the epoch value for the message context.
        last_time = dateutil.parser.parse(tx['timeStamp'])
        tx_time = last_time.timestamp()
        jdata = json.loads(ldata)
        context = {"chain_name": CHAIN_NAME,
                   "tx_hash": tx['txHash'],
                   "height": tx['blockHeight'],
                   "time": tx_time,
                   "publisher": tx["fromAddr"]}
        yield (jdata, context)
    except json.JSONDecodeError:
        # if it's not valid json, just ignore it...
        LOGGER.info("Incoming logic data is not JSON, ignoring. %r"
                    % ldata)
    if last_time:
        await Chain.set_last_time(CHAIN_NAME, last_time)
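# --- A later variant of get_json: the storage engine (IPFS or other) is
# --- selectable, and parsing is likewise offloaded to an executor thread.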
async def get_json(hash, engine='ipfs', timeout=2, tries=1):
    loop = asyncio.get_event_loop()
    content = await get_hash_content(hash, engine=engine,
                                     timeout=timeout, tries=tries)
    if content is not None and content != -1:
        try:
            # Offload parsing to an executor thread: payloads can be large.
            content = await loop.run_in_executor(None, json.loads, content)
        except json.JSONDecodeError:
            LOGGER.exception("Can't decode JSON")
            content = -1  # never retry, bogus data
    return content
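# --- Ask connected peers for content over libp2p streams: try each available
# --- stream, drop peers that answer without content, and reset broken streams.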
async def make_request(self, request_structure):
    # Flatten the peer -> streams mapping into (peer, (stream, semaphore))
    # pairs and shuffle them so load spreads across peers.
    streams = [(peer, item)
               for peer, sublist in self.peers.items()
               for item in sublist]
    random.shuffle(streams)
    while streams:
        # Iterate over a copy, since failing streams are removed on the fly.
        for peer, (stream, semaphore) in list(streams):
            if semaphore.locked():
                continue
            async with semaphore:
                try:
                    await stream.write(
                        json.dumps(request_structure).encode('utf-8'))
                    value = await stream.read(MAX_READ_LEN)
                    try:
                        value = json.loads(value)
                    except json.JSONDecodeError:
                        continue
                    if value.get('content') is None:
                        # This peer has nothing for us: drop all of its
                        # streams and ask the others.
                        for speer, info in list(streams):
                            if speer == peer:
                                streams.remove((speer, info))
                        break
                    return value
                except StreamError:
                    # let's delete this stream so it gets recreated next time
                    await stream.reset()
                    streams.remove((peer, (stream, semaphore)))
    # All peers exhausted without a usable answer.
    return None
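# --- Handle a smart-contract event from a web3 event filter: the message is
# --- already a string, and the checkpoint is again a block height.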
    # `timestamp` and `publisher` are computed earlier in the (truncated)
    # enclosing loop; the commented lines show where `timestamp` came from.
    last_height = event_data.blockNumber
    # block = await loop.run_in_executor(None, web3.eth.getBlock, event_data.blockNumber)
    # timestamp = block.timestamp
    message = event_data.args.message
    try:
        jdata = json.loads(message)
        context = {"chain_name": CHAIN_NAME,
                   "tx_hash": event_data.transactionHash,
                   "time": timestamp,
                   "height": event_data.blockNumber,
                   "publisher": publisher}
        yield (jdata, context)
    except json.JSONDecodeError:
        # if it's not valid json, just ignore it...
        LOGGER.info("Incoming logic data is not JSON, ignoring. %r"
                    % message)
    except Exception:
        LOGGER.exception("Can't decode incoming logic data %r"
                         % message)
    # Since we got no critical exception, save the last received object's
    # block height so the next requests resume from there.
    if last_height:
        await Chain.set_last_height(CHAIN_NAME, last_height)
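# Usage sketch (not part of the source): one way the pieces above could fit
# together. `request_transactions` and `incoming` are hypothetical names for
# the truncated generator and the message handler, and `Chain.get_last_height`
# is assumed as the counterpart of the `set_last_height` checkpoint used above.
async def check_incoming(target_addr):
    start_height = await Chain.get_last_height(CHAIN_NAME)
    async for jdata, context in request_transactions(target_addr, start_height):
        # Each yielded pair is a parsed JSON payload plus its chain context.
        await incoming(jdata, context)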