import itertools

from cytoolz import merge

# zpad_right, calc_chunk_root, COLLATION_SIZE, CollationHeader, and Collation
# are provided by the surrounding test helpers.


def generate_collations():
    explicit_params = {}
    for period in itertools.count():
        default_params = {
            "shard_id": 0,
            "period": period,
            "body": zpad_right(b"body%d" % period, COLLATION_SIZE),
            "proposer_address": zpad_right(b"proposer%d" % period, 20),
        }
        # only calculate the chunk root if it wouldn't be replaced anyway
        if "chunk_root" not in explicit_params:
            default_params["chunk_root"] = calc_chunk_root(default_params["body"])
        params = merge(default_params, explicit_params)
        header = CollationHeader(
            shard_id=params["shard_id"],
            chunk_root=params["chunk_root"],
            period=params["period"],
            proposer_address=params["proposer_address"],
        )
        collation = Collation(header, params["body"])
        # callers may send() a dict of overrides for the next collation
        explicit_params = (yield collation) or {}
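
A quick usage sketch (hypothetical values): the generator yields collations built from defaults, and callers can send() a dict of overrides that shadows those defaults for the next collation, since merge gives the right-hand mapping precedence.

collations = generate_collations()
first = next(collations)                   # built entirely from defaults, period 0
custom = collations.send({"shard_id": 1})  # override shadows the default shard_id
assert custom.header.shard_id == 1         # assumes Collation exposes .header.shard_id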

def test_call_checks_nonce(vm):
    # the first transaction (nonce 0) applies successfully
    computation, _ = vm.apply_transaction(SIGNED_DEFAULT_TRANSACTION)
    assert computation.is_success

    # replaying the same transaction fails: its nonce was already used
    computation, _ = vm.apply_transaction(SIGNED_DEFAULT_TRANSACTION)
    assert computation.is_error

    # skipping ahead to nonce 2 (instead of the expected 1) is also rejected
    transaction = UnsignedUserAccountTransaction(**merge(DEFAULT_TX_PARAMS, {
        "nonce": 2,
    })).as_signed_transaction(PRIVATE_KEY)
    computation, _ = vm.apply_transaction(transaction)
    assert computation.is_error

# def line restored from the body below; the snippet began mid-docstring.
def execution(filler, execution):
    """Set up the execution environment of a VM test filler."""
    execution = normalize_execution(execution or {})

    # use caller as origin if not explicitly given
    if "caller" in execution and "origin" not in execution:
        execution = assoc(execution, "origin", execution["caller"])

    if "vyperLLLCode" in execution:
        code = compile_vyper_lll(execution["vyperLLLCode"])
        if "code" in execution:
            if code != execution["code"]:
                raise ValueError("Compiled Vyper LLL code does not match")
        execution = assoc(execution, "code", code)

    execution = merge(DEFAULT_EXECUTION, execution)

    test_name = get_test_name(filler)
    return deep_merge(
        filler,
        {
            test_name: {
                "exec": execution,
            },
        },
    )
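
The defaults-plus-overrides pattern above leans on cytoolz merge giving later mappings precedence. A minimal sketch with illustrative values (the real contents of DEFAULT_EXECUTION are not shown in the snippet):

from cytoolz import merge

defaults = {"gas": 100000, "gasPrice": 0}  # illustrative stand-in for DEFAULT_EXECUTION
overrides = {"gas": 50000}
assert merge(defaults, overrides) == {"gas": 50000, "gasPrice": 0}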

from cytoolz import (
    merge,
)

from eth import precompiles
from eth._utils.address import (
    force_bytes_to_address,
)
from eth.vm.forks.frontier.computation import FRONTIER_PRECOMPILES
from eth.vm.forks.spurious_dragon.computation import SpuriousDragonComputation

from .opcodes import BYZANTIUM_OPCODES

BYZANTIUM_PRECOMPILES = merge(
    FRONTIER_PRECOMPILES,
    {
        force_bytes_to_address(b'\x05'): precompiles.modexp,
        force_bytes_to_address(b'\x06'): precompiles.ecadd,
        force_bytes_to_address(b'\x07'): precompiles.ecmul,
        force_bytes_to_address(b'\x08'): precompiles.ecpairing,
    },
)
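
A quick sanity check of the merged table (a sketch, assuming the imports above): the four Byzantium precompiles land at addresses 0x05 through 0x08, alongside the inherited Frontier entries at 0x01 through 0x04.

modexp_address = force_bytes_to_address(b'\x05')
assert BYZANTIUM_PRECOMPILES[modexp_address] is precompiles.modexp
assert len(BYZANTIUM_PRECOMPILES) == len(FRONTIER_PRECOMPILES) + 4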

class ByzantiumComputation(SpuriousDragonComputation):
    """
    A class for all execution computations in the ``Byzantium`` fork.

    Inherits from
    :class:`~eth.vm.forks.spurious_dragon.computation.SpuriousDragonComputation`
    """
    # Override (the snippet was cut off here; these are the overridden attributes,
    # pointing at the tables defined above)
    opcodes = BYZANTIUM_OPCODES
    _precompiles = BYZANTIUM_PRECOMPILES

import copy

from cytoolz import merge

from eth import constants
from eth.vm import mnemonics
from eth.vm import opcode_values
from eth.vm.logic import (
    call,
)
from eth.vm.forks.frontier.opcodes import FRONTIER_OPCODES

NEW_OPCODES = {
    opcode_values.DELEGATECALL: call.DelegateCall.configure(
        __name__='opcode:DELEGATECALL',
        mnemonic=mnemonics.DELEGATECALL,
        gas_cost=constants.GAS_CALL,
    )(),
}

HOMESTEAD_OPCODES = merge(
    copy.deepcopy(FRONTIER_OPCODES),
    NEW_OPCODES,
)
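
The configure(...)() idiom above first builds a new opcode subclass carrying the given attributes, then instantiates it. A minimal stand-in (illustrative only, not py-evm's actual Configurable implementation):

class Opcode:
    @classmethod
    def configure(cls, __name__, **overrides):
        # build a one-off subclass with the overrides as class attributes
        return type(__name__, (cls,), overrides)

DelegateCallDemo = Opcode.configure('opcode:DELEGATECALL', mnemonic='DELEGATECALL')
op = DelegateCallDemo()  # the trailing () in NEW_OPCODES does exactly this
assert op.mnemonic == 'DELEGATECALL'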

# Signature restored from the body below; the snippet began inside a docstring.
def fuse_roots(graph, keys):
    """
    Fuse nearby layers if they don't have dependencies.

    See Also
    --------
    Blockwise
    fuse
    """
    layers = graph.layers.copy()
    dependencies = graph.dependencies.copy()
    dependents = reverse_dict(dependencies)

    for name, layer in graph.layers.items():
        deps = graph.dependencies[name]
        if (
            isinstance(layer, Blockwise)
            and len(deps) > 1
            and not any(dependencies[dep] for dep in deps)  # no need to fuse if 0 or 1
            and all(len(dependents[dep]) == 1 for dep in deps)
        ):
            # lower the root layers to concrete dicts and fuse them into one layer
            new = toolz.merge(layer, *[layers[dep] for dep in deps])
            new, _ = fuse(new, keys, ave_width=len(deps))

            for dep in deps:
                del layers[dep]

            layers[name] = new
            dependencies[name] = set()

    return HighLevelGraph(layers, dependencies)
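
A small sketch of where this optimization fires (assuming a dask version that exposes fuse_roots in dask.blockwise): a Blockwise layer sitting on top of two independent root layers collapses into a single concrete layer.

import dask.array as da
from dask.blockwise import fuse_roots
from dask.core import flatten

x = da.ones((100,), chunks=(10,))
y = da.zeros((100,), chunks=(10,))
z = x + y  # one Blockwise layer depending on two root layers

graph = z.__dask_graph__()  # a HighLevelGraph
fused = fuse_roots(graph, list(flatten(z.__dask_keys__())))
assert len(fused.layers) <= len(graph.layers)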

# The opening of this dict is truncated in the snippet; the head of the EXP
# entry (its logic_fn) is reconstructed here and should be read as an assumption.
UPDATED_OPCODES = {
    opcode_values.EXP: as_opcode(
        logic_fn=arithmetic.exp(gas_per_byte=GAS_EXPBYTE_EIP160),  # assumed
        mnemonic=mnemonics.EXP,
        gas_cost=GAS_EXP_EIP160,
    ),
    opcode_values.SELFDESTRUCT: as_opcode(
        logic_fn=system.selfdestruct_eip161,
        mnemonic=mnemonics.SELFDESTRUCT,
        gas_cost=GAS_SELFDESTRUCT_EIP150,
    ),
    opcode_values.CALL: call.CallEIP161.configure(
        __name__='opcode:CALL',
        mnemonic=mnemonics.CALL,
        gas_cost=GAS_CALL_EIP150,
    )(),
}

SPURIOUS_DRAGON_OPCODES = merge(
    copy.deepcopy(TANGERINE_WHISTLE_OPCODES),
    UPDATED_OPCODES,
)

# Tail of a Blockwise-rewrite routine: ``indices``, ``dsk``, ``root``, ``inputs``,
# ``new_axes``, and ``concatenate`` are defined earlier in the (truncated) function.
# De-duplicate indices like [(x, 'i'), (b, 'i'), (x, 'i')] -> [(x, 'i'), (b, 'i')]
new_indices = []  # missing from the snippet; the loop below appends to it
seen = {}
sub = {}  # like {_0: _0, _1: _0, _2: _1}
for i, x in enumerate(indices):
    if x[1] is not None and x in seen:
        sub[i] = seen[x]
    else:
        if x[1] is not None:
            seen[x] = len(new_indices)
        sub[i] = len(new_indices)
        new_indices.append(x)

sub = {blockwise_token(k): blockwise_token(v) for k, v in sub.items()}
dsk = {k: subs(v, sub) for k, v in dsk.items()}

indices_check = {k for k, v in indices if v is not None}
numblocks = toolz.merge([inp.numblocks for inp in inputs.values()])
numblocks = {k: v for k, v in numblocks.items() if v is None or k in indices_check}

out = Blockwise(
    root,
    inputs[root].output_indices,
    dsk,
    new_indices,
    numblocks=numblocks,
    new_axes=new_axes,
    concatenate=concatenate,
)

return out
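
The de-duplication step is easier to see on plain data; a minimal sketch (no dask required) of the substitution map the loop builds:

indices = [("x", "i"), ("b", "i"), ("x", "i")]
new_indices, seen, sub = [], {}, {}
for i, x in enumerate(indices):
    if x[1] is not None and x in seen:
        sub[i] = seen[x]
    else:
        if x[1] is not None:
            seen[x] = len(new_indices)
        sub[i] = len(new_indices)
        new_indices.append(x)

assert new_indices == [("x", "i"), ("b", "i")]
assert sub == {0: 0, 1: 1, 2: 0}  # the duplicate third input reuses slot 0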

# Fragment of a method body (note the ``self`` references below); its head is
# truncated, so the key name ``output_schema`` is a hypothetical stand-in for
# the dict this entry belongs to.
output_schema = {
    'type': tf.int64,
    'ops': [],
    'schema': (),
}

# Extract the main input dataset definitions
input_ds = {"inputs": datasets.pop("inputs")}

with tf.Graph().as_default() as graph:
    # Now create source datasets composed of maps
    # and the main input dataset composed of a queue
    with tf.device("/cpu:0"):
        src_ds = create_datasets(datasets, placeholders, "map")
        input_ds = create_datasets(input_ds, placeholders, "queue")

    dataset_info = merge(input_ds, src_ds)
    src_maps = {ds_name: ds.tensor_map for ds_name, ds
                in src_ds.items()}

    # Create an expression for each device
    exprs = []
    key_idx = []

    # Get the main input dataset
    in_ds = dataset_info["inputs"].dataset

    output_map = TensorMap(tuple(o['type'] for o in outputs.values()),
                           name="output_map")

    with tf.device("/cpu:0"):
        self._output_map_pop_key = tf.placeholder(tf.int64)
        self._output_map_pop = output_map.pop(self._output_map_pop_key,
                                              name="output_map_pop")  # closing kwarg inferred (assumption)

# The opening of this dict is truncated in the snippet; the dict head and the
# SSTORE entry's opening line are reconstructed as assumptions (earlier entries
# are omitted in the original snippet).
UPDATED_OPCODES = {
    opcode_values.SSTORE: as_opcode(  # entry head assumed
        logic_fn=ensure_no_static(storage.sstore),
        mnemonic=mnemonics.SSTORE,
        gas_cost=constants.GAS_NULL,
    ),
    #
    # Self Destruct
    #
    opcode_values.SELFDESTRUCT: as_opcode(
        logic_fn=ensure_no_static(system.selfdestruct_eip161),
        mnemonic=mnemonics.SELFDESTRUCT,
        gas_cost=GAS_SELFDESTRUCT_EIP150,
    ),
}

BYZANTIUM_OPCODES = merge(
    copy.deepcopy(SPURIOUS_DRAGON_OPCODES),
    UPDATED_OPCODES,
)
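
Because merge favors later mappings, the UPDATED_OPCODES entries shadow the deep-copied Spurious Dragon table (and the deepcopy keeps the two tables from sharing mutable opcode instances). A one-line sanity check (sketch):

assert BYZANTIUM_OPCODES[opcode_values.SSTORE] is UPDATED_OPCODES[opcode_values.SSTORE]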