def post(self, *args, **kw):
    self.requests.append((args, kw))
    io = BytesIO()
    for chunk in self.data:
        msgpack.dump(chunk, io)
    io.seek(0, 0)

    class Response:
        raw = io
        ok = True

    return Response
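For context, here is a minimal sketch, not part of the original test double, of how a caller might stream those chunks back out of the fake response; iter_chunks is a hypothetical name, and only the standard msgpack.Unpacker API is assumed.

import msgpack

def iter_chunks(response):
    # Hypothetical helper: feed the in-memory buffer to an Unpacker and
    # yield each chunk that the fake post() packed into it.
    unpacker = msgpack.Unpacker(response.raw, raw=False)
    for chunk in unpacker:
        yield chunk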
import json

import msgpack

def convert(infile, outfile):
    # Derive the output name from the input name when none is given.
    if not outfile:
        ext = infile.split('.')[-1]
        outfile = '%s%s' % (infile[:-len(ext)-1], EXT)
    print('%s > %s' % (infile, outfile))
    print('reading in JSON')
    with open(infile) as op:
        data = json.load(op)
    print('writing to msgpack')
    with open(outfile, 'wb') as op:
        msgpack.dump(data, op)
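A matching read-back sketch, assuming the same msgpack package; read_back is a hypothetical name, not part of the converter.

import msgpack

def read_back(path):
    # Load the file produced by convert(); msgpack.load reads one object
    # from a binary stream, and raw=False decodes strings instead of bytes.
    with open(path, 'rb') as fp:
        return msgpack.load(fp, raw=False)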
params[i] = param
logger.debug('Saving alpha with parameter: {!r}'.format(param))
if ftype == 'csv':
    alpha.to_csv(os.path.join(outdir, 'alpha' + str(i)))
elif ftype == 'pickle':
    alpha.to_pickle(os.path.join(outdir, 'alpha' + str(i)))
elif ftype == 'msgpack':
    alpha.to_msgpack(os.path.join(outdir, 'alpha' + str(i)))

# JSON is text, but pickle and msgpack write bytes, so the file mode
# has to match the serializer.
params_path = os.path.join(outdir, 'params.json')
if ftype == 'csv':
    with open(params_path, 'w') as file:
        json.dump(params, file)
elif ftype == 'pickle':
    with open(params_path, 'wb') as file:
        cPickle.dump(params, file)
elif ftype == 'msgpack':
    with open(params_path, 'wb') as file:
        msgpack.dump(params, file)
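A possible counterpart for reading the parameters back, sketched here with the standard pickle module in place of cPickle; load_params is a hypothetical helper, not part of the original module.

import json
import os
import pickle

import msgpack

def load_params(outdir, ftype):
    # Hypothetical loader: pick the deserializer that matches ftype.
    # Pickle and msgpack need binary mode, JSON needs text mode.
    path = os.path.join(outdir, 'params.json')
    if ftype == 'csv':
        with open(path) as fh:
            return json.load(fh)
    elif ftype == 'pickle':
        with open(path, 'rb') as fh:
            return pickle.load(fh)
    elif ftype == 'msgpack':
        with open(path, 'rb') as fh:
            return msgpack.load(fh, raw=False)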
    'vocab_ent': vocab_ent,
    'embedding': embeddings.tolist(),
    'wv_cased': args.wv_cased,
}
with open('SQuAD/meta.msgpack', 'wb') as f:
    msgpack.dump(meta, f)

result = {
    'train': train,
    'dev': dev
}
# train: id, context_id, context_features, tag_id, ent_id,
# question_id, context, context_token_span, answer_start, answer_end
# dev: id, context_id, context_features, tag_id, ent_id,
# question_id, context, context_token_span, answer
with open('SQuAD/data.msgpack', 'wb') as f:
    msgpack.dump(result, f)

if args.sample_size:
    sample = {
        'train': train[:args.sample_size],
        'dev': dev[:args.sample_size]
    }
    with open('SQuAD/sample.msgpack', 'wb') as f:
        msgpack.dump(sample, f)

log.info('saved to disk.')
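For reference, a rough sketch, not part of the original preprocessing script, of how the dumped files could be read back before training:

import msgpack

with open('SQuAD/meta.msgpack', 'rb') as f:
    meta = msgpack.load(f, raw=False)
with open('SQuAD/data.msgpack', 'rb') as f:
    data = msgpack.load(f, raw=False)
train, dev = data['train'], data['dev']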
          nscf=True,
          kpts=atoms.info['kpts'] * [2, 2, 1],
          DeltaE=0.01,
          slab=True,
          Emin=-40,
          Emax=40,
          tetrahedra=False,
          sigma=0.2)
dos = list(dos)
array_to_list(dos)

# If out_file is given, write a MessagePack encoded version to disk.
# msgpack writes bytes, so the file must be opened in binary mode.
if out_file:
    with open(out_file, 'wb') as f:
        msgpack.dump(dos, f)

# Return a BSON friendly version
return json.dumps(dos)
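A speculative reader for the two outputs above; load_dos and json_str are hypothetical names, and only msgpack.load and json.loads are assumed.

import json

import msgpack

def load_dos(out_file=None, json_str=None):
    # Hypothetical helper: read either the MessagePack file written to
    # disk or the JSON string returned by the function above.
    if out_file:
        with open(out_file, 'rb') as fh:
            return msgpack.load(fh, raw=False)
    return json.loads(json_str)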
def dump(self, f):
    values = list(map(lambda x: x[1], self.get()))
    msgpack.dump(values, f)
def __init__(self, persist_path=None):
    """
    persist_path: if provided, serialize to/from disk at this path
    """
    self.persist_path = persist_path
    self.dump = partial(msgpack.dump, default=msgpack_serialize)
    self.load = partial(msgpack.load, ext_hook=msgpack_deserialize)
    self.dict = {}
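A sketch of how the partials might be used for persistence, assuming methods like these exist on the same class; persist and restore are hypothetical names, not part of the original code.

def persist(self):
    # Hypothetical helper: serialize self.dict to persist_path using the
    # msgpack_serialize-aware dumper configured in __init__.
    if self.persist_path:
        with open(self.persist_path, 'wb') as fh:
            self.dump(self.dict, fh)

def restore(self):
    # Hypothetical counterpart: load the dict back if a file was written.
    if not self.persist_path:
        return
    try:
        with open(self.persist_path, 'rb') as fh:
            self.dict = self.load(fh)
    except FileNotFoundError:
        self.dict = {}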
elif args["--remove"]:
for url in urls:
remove(database, url, tags)
#msgpack.dump(database, open(d_file, 'wb'))
elif args["--tags"]:
result = list(list_every_tags(database))
result.sort()
for tag in result:
print(tag)
elif args["TAG"]:
for url in urls:
add(database, url, tags)
msgpack.dump(database, open(d_file, 'wb'))
else:
for url in urls:
for tag in database[url]:
print(tag)
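A minimal sketch of how the database might be loaded at startup, assuming it is the url-to-tags mapping dumped above; load_database is a hypothetical name.

import msgpack

def load_database(d_file):
    # Hypothetical loader for the mapping that the branches above read
    # from and dump back to d_file.
    try:
        with open(d_file, 'rb') as fh:
            return msgpack.load(fh, raw=False)
    except FileNotFoundError:
        return {}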
def serializeToFile(self, fname, annotations):
    """
    Overwritten to write Msgpack files.
    """
    # TODO make all image filenames relative to the label file
    import msgpack
    # msgpack writes bytes, so open the label file in binary mode.
    with open(fname, "wb") as f:
        msgpack.dump(annotations, f)
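A possible read-side counterpart, assuming the same msgpack module; deserializeFromFile is a hypothetical method name, not confirmed by the original class.

def deserializeFromFile(self, fname):
    # Hypothetical counterpart: load the annotations back from a
    # Msgpack label file written by serializeToFile().
    import msgpack
    with open(fname, "rb") as f:
        return msgpack.load(f, raw=False)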
    self.history = msgpack.load(open(filename, 'rb'), raw=False)
except Exception:
    #
    # The history file might still be in pickle format, we read
    # it and migrate to msgpack
    #
    try:
        self.history = cPickle.load(open(filename, 'rb'))
    except Exception:
        #
        # Well... the file is completely broken, just write an
        # empty dict to it using msgpack to have a nicer run
        # the next time the user executes the GUI
        #
        self.history = {}
        msgpack.dump({}, open(filename, 'wb'))
    else:
        #
        # We were able to read using pickle, migrate the file to
        # msgpack to prevent deserialization issues
        # https://github.com/andresriancho/w3af/issues/17807
        #
        msgpack.dump(self.history, open(filename, 'wb'))
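A write-side sketch, assuming the same msgpack module and self.history attribute; save_history is a hypothetical helper, not part of the original migration code.

def save_history(self, filename):
    # Hypothetical helper: always persist in msgpack so the pickle
    # migration above runs at most once per history file.
    with open(filename, 'wb') as fh:
        msgpack.dump(self.history, fh)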