Secure your code as it's written. Use Snyk Code to scan source code in minutes - no build needed - and fix issues immediately.
// Handler for EMsg.ClientFromGC: a message relayed to us from a Game Coordinator.
// Extracts the target job ID and payload, then routes the message to a pending
// GC job callback if one matches, otherwise emits it as an event.
SteamUser.prototype._handlerManager.add(SteamUser.EMsg.ClientFromGC, function(body) {
// Mask off the protobuf flag bit to recover the raw GC message type ID.
let msgType = body.msgtype & ~PROTO_MASK;
let targetJobID;
let payload;
if (body.msgtype & PROTO_MASK) {
// This is a protobuf message: bytes 4-7 hold the little-endian length of the
// serialized CMsgProtoBufHeader that begins at byte 8.
let headerLength = body.payload.readInt32LE(4);
let protoHeader = Messages.decodeProto(Schema.CMsgProtoBufHeader, body.payload.slice(8, 8 + headerLength));
// Fall back to JOBID_NONE when the header carries no target job ID.
targetJobID = protoHeader.job_id_target || JOBID_NONE;
payload = body.payload.slice(8 + headerLength);
} else {
// Non-protobuf (extended) header: fixed 18 bytes, with the target job ID read
// from offset 2. NOTE(review): ByteBuffer.wrap defaults to big-endian —
// confirm that is the intended byte order for this header.
let header = ByteBuffer.wrap(body.payload.slice(0, 18));
targetJobID = header.readUint64(2);
payload = body.payload.slice(18);
}
this.emit('debug', `Received ${body.appid} GC message ${msgType}`);
if (targetJobID && this._jobsGC[targetJobID]) {
// A caller registered a callback for this job ID; deliver directly to it.
this._jobsGC[targetJobID].call(this, body.appid, msgType, payload);
} else {
// No pending job: broadcast to listeners. The second emit's misspelled event
// name appears deliberate — presumably kept for backwards compatibility with
// older consumers; confirm before removing.
this.emit('receivedFromGC', body.appid, msgType, payload);
this.emit('recievedFromGC', body.appid, msgType, payload);
}
});
ResourceFinder.prototype.processResourceTable = function(resourceBuffer) {
const bb = ByteBuffer.wrap(resourceBuffer, "binary", true);
// Resource table structure
var type = bb.readShort(),
headerSize = bb.readShort(),
size = bb.readInt(),
packageCount = bb.readInt(),
buffer,
bb2;
if (type != RES_TABLE_TYPE) {
throw new Error("No RES_TABLE_TYPE found!");
}
if (size != bb.limit) {
throw new Error("The buffer size not matches to the resource table size.");
}
bb.offset = headerSize;
/**
 * Decode a framed buffer into its header and message objects.
 *
 * Framing (as implemented here): byte 1 holds the header length, the header
 * bytes occupy [2, 2 + headerLength), and the message body starts at
 * headerLength + 3. NOTE(review): the meaning of byte 0 and the extra +1 skip
 * before the body are not visible from this code — confirm against the wire
 * format specification.
 */
function decode_message(bytes_buffer) {
// Single-byte header length at index 1.
const headerLength = bytes_buffer[1];
const headerSlice = bytes_buffer.slice(2, 2 + headerLength);
const header = message.Header.decode(ByteBuffer.wrap(headerSlice));
const bodySlice = bytes_buffer.slice(headerLength + 3);
const msg = message.Message.decode(ByteBuffer.wrap(bodySlice));
return {'header': header, 'message': msg};
}
cleanAttachment(attachment) {
return {
..._.omit(attachment, "thumbnail"),
id: attachment.id.toString(),
key: attachment.key
? ByteBuffer.wrap(attachment.key, "base64").toString("base64")
: null,
digest: attachment.digest
? ByteBuffer.wrap(attachment.digest, "base64").toString("base64")
: null
};
}
if (this._handlers[msgType]) {
handler = this._handlers[msgType];
}
let msgName = msgType;
for (let i in Language) {
if (Language.hasOwnProperty(i) && Language[i] == msgType) {
msgName = i;
break;
}
}
this.emit('debug', "Got " + (handler ? "handled" : "unhandled") + " GC message " + msgName + (isProtobuf ? " (protobuf)" : ""));
if (handler) {
handler.call(this, isProtobuf ? payload : ByteBuffer.wrap(payload, ByteBuffer.LITTLE_ENDIAN));
}
});
(hash: any) => {
resolve(ByteBuffer.wrap(hash));
}
);
/**
 * Coerce `thing` to a string.
 *
 * Strings are returned unchanged; anything else is wrapped as a ByteBuffer
 * and rendered in 'binary' (latin1) encoding.
 *
 * BUG FIX: the original called `new ByteBuffer.wrap(thing)`. `ByteBuffer.wrap`
 * is a static factory, not a constructor — it only worked because a function
 * invoked with `new` that returns an object yields that object. Call the
 * factory directly.
 */
export function toString(thing) {
  if (typeof thing === 'string') {
    return thing;
  }
  return ByteBuffer.wrap(thing).toString('binary');
}
// Interpret `key` as a 'binary' (latin1) encoded string and return the raw
// bytes as an ArrayBuffer.
function fromEncodedBinaryToArrayBuffer(key) {
  const wrapped = ByteBuffer.wrap(key, "binary");
  return wrapped.toArrayBuffer();
}
// Decode a base64 string into an ArrayBuffer of its raw bytes.
function base64ToArrayBuffer(string) {
  const decoded = ByteBuffer.wrap(string, "base64");
  return decoded.toArrayBuffer();
}
// Transform-stream hook: decode one incoming frame, inflating its payload when
// the protocol's compression is active.
//
// Frame layout when compression is enabled: a varint32 declared length
// followed by the payload. A declared length of zero (or one below the
// configured threshold) marks the payload as uncompressed and it is pushed
// as-is; otherwise the payload is zlib-inflated before being pushed.
_transform(chunk: Buffer, encoding: string, callback: TransformCallback) {
  // Compression disabled: frames pass through untouched.
  if (!this.option.enableCompression) {
    this.push(chunk);
    callback();
    return;
  }
  const frame = ByteBuffer.wrap(chunk);
  const declaredLength = frame.readVarint32();
  // Everything after the varint is the payload.
  const payload = frame.buffer.slice(frame.offset);
  if (declaredLength === 0 || declaredLength < this.option.compressionThreshold) {
    this.push(payload);
    callback();
    return;
  }
  unzip(payload, (err, inflated) => {
    if (err) {
      callback(err);
    } else {
      this.push(inflated);
      callback();
    }
  });
}