// Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
/**
 * Serializes a container value into `output` beginning at `start`.
 *
 * Layout follows SSZ container encoding: a fixed-size region (fixed-size
 * fields inline, variable-size fields as 4-byte offsets relative to `start`),
 * followed by a variable-size region holding the variable fields' bytes.
 *
 * @param value  object whose fields are serialized in `type.fields` order
 * @param type   parsed container type describing field names and types
 * @param output buffer written to in place; assumed large enough
 * @param start  byte index at which the container's encoding begins
 * @returns index one past the last byte written
 */
function _serializeObject(value: SerializableObject, type: ContainerType, output: Buffer, start: number): number {
  // Size of the fixed region: each variable-size field contributes an offset
  // slot, each fixed-size field contributes its own serialized size.
  const fixedLength = type.fields.reduce(
    (total, [, fieldType]) =>
      total + (isVariableSizeType(fieldType) ? BYTES_PER_LENGTH_PREFIX : fixedSize(fieldType)),
    0,
  );
  let fixedIndex = start;
  let variableIndex = start + fixedLength;
  for (const [fieldName, fieldType] of type.fields) {
    if (isVariableSizeType(fieldType)) {
      // Write the field's bytes into the variable region, then record its
      // offset (relative to `start`) in the fixed region.
      const nextVariableIndex = _serialize(value[fieldName], fieldType, output, variableIndex);
      output.writeUIntLE(variableIndex - start, fixedIndex, BYTES_PER_LENGTH_PREFIX);
      fixedIndex += BYTES_PER_LENGTH_PREFIX;
      variableIndex = nextVariableIndex;
    } else {
      // Fixed-size field: serialized inline in the fixed region.
      fixedIndex = _serialize(value[fieldName], fieldType, output, fixedIndex);
    }
  }
  return variableIndex;
}
// Serializes an array-typed value into `output` starting at `start`; returns
// the index one past the last byte written.
// NOTE(review): this definition is truncated in this view — the `else` branch
// for fixed-size elements has no body and the function never closes before the
// next definition begins. The file appears to be spliced from fragments; code
// left byte-identical, comments only.
function _serializeArray(value: SerializableArray, type: ArrayType, output: Buffer, start: number): number {
  let index = start;
  if (isVariableSizeType(type.elementType)) {
    // all elements are variable-size
    // Fixed region holds one BYTES_PER_LENGTH_PREFIX-byte offset per element;
    // the variable region begins immediately after it.
    let fixedIndex = index;
    let currentOffsetIndex = start + value.length * BYTES_PER_LENGTH_PREFIX;
    let nextOffsetIndex = currentOffsetIndex;
    for (const v of value) {
      // write serialized element to variable section
      nextOffsetIndex = _serialize(v, type.elementType, output, currentOffsetIndex);
      // write offset (relative to `start`) into the fixed section
      output.writeUIntLE(currentOffsetIndex - start, fixedIndex, BYTES_PER_LENGTH_PREFIX);
      // update offset
      currentOffsetIndex = nextOffsetIndex;
      fixedIndex += BYTES_PER_LENGTH_PREFIX;
    }
    index = currentOffsetIndex;
  } else {
    // all elements are fixed-size
// Deserializes an array-typed value from data[start, end).
// NOTE(review): this definition is truncated in this view — the loop body is
// cut off after the offset-monotonicity assert; the per-element deserialization
// and the fixed-size branch are missing (file appears spliced from fragments).
// Code left byte-identical, comments only.
function _deserializeArray(data: Buffer, type: ArrayType, start: number, end: number): SerializableArray {
  const value: SerializableArray = [];
  if (start === end) {
    // Empty byte range encodes an empty array.
    return value;
  }
  if (isVariableSizeType(type.elementType)) {
    // all elements variable-sized
    // indices contain offsets
    let currentIndex = start;
    let nextIndex = currentIndex;
    // data exists between offsets
    // The first offset also marks the end of the offset table: the table
    // occupies [start, firstOffset).
    const firstOffset = start + data.readUIntLE(start, BYTES_PER_LENGTH_PREFIX);
    let currentOffset = firstOffset;
    let nextOffset = currentOffset;
    // read off offsets, deserializing values until we hit the first offset index
    for (; currentIndex < firstOffset;) {
      assert(currentOffset <= end, "Offset out of bounds");
      nextIndex = currentIndex + BYTES_PER_LENGTH_PREFIX;
      // The final element's span extends to `end`; every other element ends at
      // the next recorded offset.
      nextOffset = nextIndex === firstOffset
        ? end
        : start + data.readUIntLE(nextIndex, BYTES_PER_LENGTH_PREFIX);
      assert(currentOffset <= nextOffset, "Offsets must be increasing");
// NOTE(review): orphaned fragment — these lines do not attach to the preceding
// definition. They look like the fixed-size bookkeeping from a container
// deserializer (`_deserializeObject`?) spliced in mid-file: `fixedSizes` maps
// each field to its fixed size (or `false` for variable-size fields), and the
// dangling `.map(...).reduce(...)` chain resembles a fixed-region length sum.
// Left byte-identical pending file reconstruction.
const fixedSizes: (number | false)[] = type.fields.map(([, fieldType]) =>
  !isVariableSizeType(fieldType) && fixedSize(fieldType));
// with the fixed sizes, we can read the offsets, and store for later
  .map(([, fieldType]) => isVariableSizeType(fieldType) ? BYTES_PER_LENGTH_PREFIX : fixedSize(fieldType))
  .reduce((a, b) => a + b, 0);
export function deserialize(data: Buffer, type: AnySSZType): any {
const _type = parseType(type);
if (!isVariableSizeType(_type)) {
assert(fixedSize(_type) === data.length, "Incorrect data length");
}
return _deserialize(data, _type, 0, data.length);
}
/**
 * Returns the serialized byte length of `value` under SSZ type `type`.
 *
 * @param type  parsed SSZ type descriptor
 * @param value value whose encoded size is computed (only consulted for
 *              variable-size types; fixed-size types have a constant size)
 * @returns serialized length in bytes
 */
export function size(type: FullSSZType, value: SerializableValue): number {
  if (isVariableSizeType(type)) {
    return variableSize(type, value);
  }
  return fixedSize(type);
}
// NOTE(review): orphaned fragment — a bare method chain with no receiver.
// It appears to be the per-element size accumulation from a variable-size
// list/vector computation (`variableSize`?): each element contributes its own
// serialized size plus a BYTES_PER_LENGTH_PREFIX-byte offset slot when the
// element type is variable-size. Left byte-identical pending reconstruction.
  .map((v) =>
    size((type as VectorType).elementType, v) +
    (isVariableSizeType(type.elementType) ? BYTES_PER_LENGTH_PREFIX : 0))
  .reduce((a, b) => a + b, 0);