def test_issue193_splitting_function():
    sql = """CREATE FUNCTION a(x VARCHAR(20)) RETURNS VARCHAR(20)
    BEGIN
        DECLARE y VARCHAR(20);
        RETURN x;
    END;
    SELECT * FROM a.b;"""
    statements = sqlparse.split(sql)
    assert len(statements) == 2
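
These tests exercise statement splitting; for reference, a minimal standalone run of the same call (the two-statement input is made up for illustration):

import sqlparse

statements = sqlparse.split("SELECT 1; SELECT 2;")
print(statements)   # ['SELECT 1;', 'SELECT 2;']
assert len(statements) == 2
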
def test_psql_quotation_marks():
    # issue83
    # regression: make sure plain $$ work
    t = sqlparse.split("""
    CREATE OR REPLACE FUNCTION testfunc1(integer) RETURNS integer AS $$
    ....
    $$ LANGUAGE plpgsql;
    CREATE OR REPLACE FUNCTION testfunc2(integer) RETURNS integer AS $$
    ....
    $$ LANGUAGE plpgsql;""")
    assert len(t) == 2

    # make sure $SOMETHING$ works too
    t = sqlparse.split("""
    CREATE OR REPLACE FUNCTION testfunc1(integer) RETURNS integer AS $PROC_1$
    ....
    $PROC_1$ LANGUAGE plpgsql;
    CREATE OR REPLACE FUNCTION testfunc2(integer) RETURNS integer AS $PROC_2$
    ....
    $PROC_2$ LANGUAGE plpgsql;""")
    assert len(t) == 2
def test_split_cursor_declare():
    sql = ('DECLARE CURSOR "foo" AS SELECT 1;\n'
           'SELECT 2;')
    stmts = sqlparse.split(sql)
    assert len(stmts) == 2
def convert_to_sqlalchemy_statement(raw_sql_script):
    """Convert a raw SQL script into SQLAlchemy statements."""
    # remove comments and trailing spaces
    formatted_sql_script = sqlparse.format(
        raw_sql_script.strip(), strip_comments=True)
    return sqlparse.split(formatted_sql_script)
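
A minimal usage sketch of the helper above (the SQL script is made up for illustration):

import sqlparse

script = """
-- load the users
SELECT * FROM users;
SELECT * FROM orders;  -- second statement
"""
# comments are stripped first, then the script is split into its two statements
print(convert_to_sqlalchemy_statement(script))
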
def tokenize_content(file_path, content):
    """
    Take a SQL file with multiple statements and
    yield its blocks.
    """
    sql_blocks = list(filter(None, sqlparse.split(content)))
    if not sql_blocks:
        return

    def builder(content, has_body=False):
        return Block(file_path, has_body, content)

    for block in sql_blocks:
        # would have used `yield from` below but py2...
        for r in _extract_parts(builder, block.split('\n')):
            yield r
def query_check(self, db_name=None, sql=''):
    # Check the query: strip comments and split it
    result = {'msg': '', 'bad_query': False, 'filtered_sql': sql, 'has_star': False}
    keyword_warning = ''
    sql_whitelist = ['select', 'explain']
    # build the regex pattern from the whitelist
    whitelist_pattern = "^" + "|^".join(sql_whitelist)
    # strip comments, check the syntax, and keep only the first valid SQL statement
    try:
        sql = sqlparse.format(sql, strip_comments=True)
        sql = sqlparse.split(sql)[0]
        result['filtered_sql'] = sql.strip()
        # sql_lower = sql.lower()
    except IndexError:
        result['bad_query'] = True
        result['msg'] = 'No valid SQL statement found'
        return result
    if re.match(whitelist_pattern, sql) is None:
        result['bad_query'] = True
        result['msg'] = 'Only {} statements are supported!'.format(','.join(sql_whitelist))
        return result
    if result.get('bad_query'):
        result['msg'] = keyword_warning
        return result
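
The core of the check above can be exercised outside the class; a minimal sketch (the function name is made up, the whitelist and pattern mirror the method):

import re
import sqlparse

def first_statement_is_allowed(sql, whitelist=('select', 'explain')):
    """Return True when the first statement starts with a whitelisted keyword."""
    sql = sqlparse.format(sql, strip_comments=True)
    statements = sqlparse.split(sql)
    if not statements:
        return False
    pattern = "^" + "|^".join(whitelist)
    return re.match(pattern, statements[0].strip()) is not None

print(first_statement_is_allowed("-- peek\nselect * from users;"))  # True
print(first_statement_is_allowed("drop table users;"))              # False
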
def _reorder_types(self, types_script):
    """
    Take type scripts and reorder them to avoid "type doesn't exist" exceptions
    """
    self._logger.debug('Running types definitions scripts')
    self._logger.debug('Reordering types definitions scripts to avoid "type does not exist" exceptions')
    _type_statements = sqlparse.split(types_script)
    # TODO: move up to classes
    _type_statements_dict = {}  # dictionary that stores statements with type and order
    type_unordered_scripts = []  # scripts to execute without order
    type_drop_scripts = []  # drop scripts to execute first
    for _type_statement in _type_statements:
        _type_statement_parsed = sqlparse.parse(_type_statement)
        if len(_type_statement_parsed) > 0:  # the parsed object can be empty, so check first
            # only type declarations need to be ordered
            if _type_statement_parsed[0].get_type() == 'CREATE':
                _type_body_r = r'\bcreate\s+\b(?:type|domain)\s+\b(\w+\.\w+|\w+)\b'
                _type_name = re.compile(_type_body_r, flags=re.IGNORECASE).findall(_type_statement)[0]
                _type_statements_dict[str(_type_name)] = \
                    {'script': _type_statement, 'deps': []}
            elif _type_statement_parsed[0].get_type() == 'DROP':
                type_drop_scripts.append(_type_statement)
            else:
                # remaining statements (neither CREATE nor DROP) run without ordering
                type_unordered_scripts.append(_type_statement)
v_omnidb_database = OmniDatabase.Generic.InstantiateDatabase(
    'sqlite',
    '',
    '',
    settings.OMNIDB_DATABASE,
    '',
    '',
    '0',
    ''
)

log_start_time = datetime.now()
log_status = 'success'

try:
    list_sql = sqlparse.split(v_sql)

    v_data_return = ''
    run_command_list = True

    if v_mode == 0:
        v_database.v_connection.v_autocommit = v_autocommit
        if not v_database.v_connection.v_con or v_database.v_connection.GetConStatus() == 0:
            v_database.v_connection.Open()
        else:
            v_database.v_connection.v_start = True

    if v_mode == 1 or v_mode == 2:
        v_table = v_database.v_connection.QueryBlock('', 50, True, True)
        # need to stop again
        if not v_database.v_connection.v_start or len(v_table.Rows) >= 50:
            v_data_return += '\n' + v_table.Pretty(v_database.v_connection.v_expanded) + '\n' + v_database.v_connection.GetStatus()
def check_sqlnumber(self, data):
    """At most 1000 DML and DDL statements may be submitted at once."""
    sql_list = sqlparse.split(data)
    if len(sql_list) > 1000:
        return False, len(sql_list)
    return True, None
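
The same cap can be sketched as a plain function; a minimal standalone version (the function name and the demo script are made up, the 1000-statement limit mirrors the docstring):

import sqlparse

def within_statement_limit(data, limit=1000):
    """Return (ok, count); ok is False when more than `limit` statements are submitted."""
    statements = sqlparse.split(data)
    if len(statements) > limit:
        return False, len(statements)
    return True, None

print(within_statement_limit("INSERT INTO t VALUES (1); UPDATE t SET a = 2;"))
# -> (True, None): only two statements were submitted
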
indexes.add((p_table.name, col_b.altname))

if clauses:
    sql += 'LEFT JOIN "{}" ON {} '.format(p_table.name, " AND ".join(clauses))

if where:
    # FIXME: Really lightweight injection prevention. For the most part, we don't care much,
    # since there isn't any private data in a warehouse database, but we do want to prevent
    # dropping tables, etc.
    where = where.split(';', 1)[0]
    sql += " WHERE {} ".format(where)

# One more sanitization
sql = sqlparse.split(sql)[0]

for table, col in indexes:
    self.create_index(table, [col])

self.logger.info("Creating indexed table {} with: \n{}".format(table_name, sql))
self.install_view(table_name, sql, data=dict(), type_='indexed', doc=doc)