def parse(self, client: 'Client'):
    header_class = self.build_header()
    buffer = client.recv(ctypes.sizeof(header_class))
    header = header_class.from_buffer_copy(buffer)
    fields = []
    if header.status_code == OP_SUCCESS:
        # The body starts with either the field names or the field count,
        # followed by the row count; cursor-backed responses are prefixed
        # with the cursor ID.
        following = [
            self.fields_or_field_count(),
            ('row_count', Int),
        ]
        if self.has_cursor:
            following.insert(0, ('cursor', Long))
        body_struct = Struct(following)
        body_class, body_buffer = body_struct.parse(client)
        body = body_class.from_buffer_copy(body_buffer)
        if self.include_field_names:
            field_count = body.fields.length
        else:
            field_count = body.field_count
        # Read the rows field by field; every value comes in as a generic
        # data object.
        data_fields = []
        data_buffer = b''
        for i in range(body.row_count):
            row_fields = []
            row_buffer = b''
            for j in range(field_count):
                field_class, field_buffer = AnyDataObject.parse(client)
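                # A minimal sketch of how the loop typically continues from
                # this point (the 'column_{}' / 'row_{}' names and the
                # dynamically built ctypes row structure are illustrative
                # assumptions, not verbatim library code):
                row_fields.append(('column_{}'.format(j), field_class))
                row_buffer += field_buffer
            row_class = type(
                'sql_response_row',
                (ctypes.LittleEndianStructure,),
                {'_pack_': 1, '_fields_': row_fields},
            )
            data_fields.append(('row_{}'.format(i), row_class))
            data_buffer += row_buffer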
result = {
    'more': Bool.to_python(
        ctype_object.more, *args, **kwargs
    ),
    'data': [],
}
# Field names are only present when the query requested them; otherwise
# the response carries a plain field count.
if hasattr(ctype_object, 'fields'):
    result['fields'] = StringArray.to_python(
        ctype_object.fields, *args, **kwargs
    )
else:
    result['field_count'] = Int.to_python(
        ctype_object.field_count, *args, **kwargs
    )
if hasattr(ctype_object, 'cursor'):
    result['cursor'] = Long.to_python(
        ctype_object.cursor, *args, **kwargs
    )
# Convert every row of the ctypes body into a plain Python list.
for row_item in ctype_object.data._fields_:
    row_name = row_item[0]
    row_object = getattr(ctype_object.data, row_name)
    row = []
    for col_item in row_object._fields_:
        col_name = col_item[0]
        col_object = getattr(row_object, col_name)
        row.append(
            AnyDataObject.to_python(col_object, *args, **kwargs)
        )
    result['data'].append(row)
return result
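# A rough illustration of the dict shape the conversion above produces
# (the concrete values are made up for the example, not taken from a real
# response):
#
#     {
#         'more': True,
#         'field_count': 2,   # or 'fields': [...] when field names were requested
#         'cursor': 1,        # present only for cursor-backed responses
#         'data': [[1, 'Ignite'], [2, 'GridGain']],
#     }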
    [
        ('hash_code', Int),
        ('flag', Byte),
        ('peek_modes', PeekModes),
    ],
    query_id=query_id,
)
result = query_struct.perform(
    connection,
    query_params={
        'hash_code': cache_id(cache),
        'flag': 1 if binary else 0,
        'peek_modes': peek_modes,
    },
    response_config=[
        ('count', Long),
    ],
)
if result.status == 0:
    # On success, collapse the raw response dict to the bare entry count.
    result.value = result.value['count']
return result
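# Hypothetical usage of the surrounding call (the function and argument
# names below are assumptions chosen for illustration, not verbatim
# library API):
#
#     result = cache_get_size(connection, 'my_cache', peek_modes=0)
#     if result.status == 0:
#         print('cache holds', result.value, 'entries')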
is generated,
:return: API result data object. Contains a zero status and a value
    of type dict with results on success, or a non-zero status and an
    error description otherwise.

    The value dict is of the following format:

    * `data`: list, result values,
    * `more`: bool, True if more data is available for subsequent
      `sql_fields_cursor_get_page` calls.
"""
query_struct = Query(
    OP_QUERY_SQL_FIELDS_CURSOR_GET_PAGE,
    [
        ('cursor', Long),
    ],
    query_id=query_id,
)
_, send_buffer = query_struct.from_python({
    'cursor': cursor,
})
connection.send(send_buffer)
# The page payload is a row-major array of generic data objects, one entry
# per field, followed by the 'more' flag.
response_struct = Response([
    ('data', StructArray([
        ('field_{}'.format(i), AnyDataObject) for i in range(field_count)
    ])),
    ('more', Bool),
])
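# A hypothetical paging step built on top of this call (the function name,
# its signature and the helper variables are assumptions for illustration,
# not verbatim library API):
#
#     rows = []
#     result = sql_fields_cursor_get_page(connection, cursor, field_count)
#     if result.status == 0:
#         rows.extend(result.value['data'])
#         if not result.value['more']:
#             pass  # the cursor is exhausted; no further pages to fetch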