Ensure that the macro query result is downloaded properly. Fixes #5965

Rahul Shirsat
2020-12-14 11:58:53 +05:30
committed by Akshay Joshi
parent 296d22ad83
commit 6475a70514
18 changed files with 147 additions and 76 deletions


@@ -728,27 +728,35 @@ WHERE db.datname = current_database()""")
if self.async_ == 1:
self._wait(cur.connection)
def execute_on_server_as_csv(self,
query, params=None,
formatted_exception_msg=False,
records=2000):
def execute_on_server_as_csv(self, params=None,
formatted_exception_msg=False, records=2000):
"""
To fetch query result and generate CSV output
Args:
query: SQL
params: Additional parameters
formatted_exception_msg: For exception
records: Number of initial records
Returns:
Generator response
"""
status, cur = self.__cursor()
self.row_count = 0
cur = self.__async_cursor
if not cur:
return False, self.CURSOR_NOT_FOUND
if not status:
return False, str(cur)
query_id = random.randint(1, 9999999)
if self.conn.isexecuting():
return False, gettext(
"Asynchronous query execution/operation underway."
)
encoding = self.python_encoding
query = None
try:
query = str(cur.query, encoding) \
if cur and cur.query is not None else None
except Exception:
current_app.logger.warning('Error encoding query')
pass
current_app.logger.log(
25,
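Note: psycopg2 exposes the statement last sent to the server on the cursor as bytes (cursor.query), which is what the block above decodes using the driver's python_encoding before logging. A minimal, self-contained sketch of that pattern (the DSN and the utf-8 fallback are assumptions, not pgAdmin code):

    import psycopg2

    # Assumed connection parameters, for illustration only.
    conn = psycopg2.connect("dbname=postgres")
    cur = conn.cursor()
    cur.execute("SELECT %s AS answer", (42,))

    # cursor.query is the exact statement sent to the backend, as bytes,
    # or None if nothing has been executed on this cursor yet.
    query = None
    try:
        query = str(cur.query, 'utf-8') if cur.query is not None else None
    except Exception:
        # If the bytes cannot be decoded, skip logging the query text
        # rather than failing the download.
        pass
    print(query)  # SELECT 42 AS answer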
@@ -757,13 +765,14 @@ WHERE db.datname = current_database()""")
server_id=self.manager.sid,
conn_id=self.conn_id,
query=query,
query_id=query_id
query_id=self.__async_query_id
)
)
try:
# Unregistering type casting for large size data types.
unregister_numeric_typecasters(self.conn)
self.__internal_blocking_execute(cur, query, params)
if self.async_ == 1:
self._wait(cur.connection)
except psycopg2.Error as pe:
cur.close()
errmsg = self._formatted_exception_msg(pe, formatted_exception_msg)
@@ -775,7 +784,7 @@ WHERE db.datname = current_database()""")
server_id=self.manager.sid,
conn_id=self.conn_id,
errmsg=errmsg,
query_id=query_id
query_id=self.__async_query_id
)
)
return False, errmsg
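The except branch hands the exception to _formatted_exception_msg before logging it and returning it to the caller; psycopg2 provides the raw pieces such a helper typically works from. A rough, illustrative sketch (not pgAdmin's actual helper):

    import psycopg2

    def format_pg_error(pe, formatted_exception_msg=True):
        """Build a readable message from a psycopg2.Error (illustrative only)."""
        diag = getattr(pe, 'diag', None)
        if formatted_exception_msg and diag is not None and diag.message_primary:
            parts = [diag.message_primary]
            if diag.message_detail:
                parts.append('DETAIL: ' + diag.message_detail)
            if diag.message_hint:
                parts.append('HINT: ' + diag.message_hint)
            return '\n'.join(parts)
        # Fall back to the raw libpq message, then to the exception text.
        return pe.pgerror or str(pe)

On a backend error such as selecting from a missing table, diag.message_primary carries the primary message while pgerror holds the full multi-line server message.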
@@ -809,13 +818,12 @@ WHERE db.datname = current_database()""")
return results
def gen(quote='strings', quote_char="'", field_separator=',',
replace_nulls_with=None):
def gen(trans_obj, quote='strings', quote_char="'",
field_separator=',', replace_nulls_with=None):
cur.scroll(0, mode='absolute')
results = cur.fetchmany(records)
if not results:
if not cur.closed:
cur.close()
yield gettext('The query executed did not return any data.')
return
@@ -857,8 +865,6 @@ WHERE db.datname = current_database()""")
results = cur.fetchmany(records)
if not results:
if not cur.closed:
cur.close()
break
res_io = StringIO()
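gen() streams the result set in batches instead of materialising it all at once, which keeps memory flat for large downloads. A condensed, self-contained sketch of that batching pattern (the function name and defaults are illustrative, not pgAdmin's API):

    import csv
    from io import StringIO

    def rows_as_csv(cur, records=2000, field_separator=',', quote_char='"'):
        """Yield CSV chunks from an already-executed, scrollable cursor."""
        cur.scroll(0, mode='absolute')  # rewind to the first row
        header_written = False
        while True:
            results = cur.fetchmany(records)
            if not results:
                break
            buf = StringIO()
            writer = csv.writer(buf, delimiter=field_separator,
                                quotechar=quote_char,
                                quoting=csv.QUOTE_NONNUMERIC)
            if not header_written:
                # Column names come from the cursor description.
                writer.writerow([desc[0] for desc in cur.description])
                header_written = True
            writer.writerows(results)
            yield buf.getvalue()

In a Flask view such a generator can be passed straight to Response(..., mimetype='text/csv') so the file is streamed to the client chunk by chunk.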
@@ -874,6 +880,17 @@ WHERE db.datname = current_database()""")
results = handle_null_values(results, replace_nulls_with)
csv_writer.writerows(results)
yield res_io.getvalue()
try:
# Try to scroll the cursor back to where it was before the
# download; ignore the error if it cannot scroll back.
rows_fetched_from = trans_obj.get_fetched_row_cnt()
cur.scroll(rows_fetched_from, mode='absolute')
except psycopg2.Error:
# Ignore the error: the cursor tried to scroll to the specified
# position but hit the end of the records.
pass
# Registering back type caster for large size data types to string
# which was unregistered at starting
register_string_typecasters(self.conn)
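unregister_numeric_typecasters / register_string_typecasters are pgAdmin helpers built on psycopg2's type-casting hooks: the string casters make numeric OIDs come back as str (so precision survives the trip to the client), and "unregistering" them for the CSV pass amounts to putting psycopg2's default casters back. A generic sketch of that mechanism, with an illustrative OID list, not the actual pgAdmin helpers:

    from psycopg2 import extensions

    # int8, int2, int4, float4, float8, numeric (illustrative subset).
    NUMERIC_OIDS = (20, 21, 23, 700, 701, 1700)

    def register_string_casters(conn):
        # The cast function receives the raw text sent by the server; returning
        # it unchanged makes psycopg2 hand the value back as a Python str.
        as_string = extensions.new_type(NUMERIC_OIDS, 'NUMERIC_AS_STRING',
                                        lambda value, cursor: value)
        extensions.register_type(as_string, conn)

    def restore_default_numeric_casters(conn):
        # psycopg2 ships its default casters as ready-made objects; registering
        # them again on this connection overrides the string casters above.
        for caster in (extensions.INTEGER, extensions.LONGINTEGER,
                       extensions.FLOAT, extensions.DECIMAL):
            extensions.register_type(caster, conn)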
@@ -1224,6 +1241,7 @@ WHERE db.datname = current_database()""")
Args:
records: no of records to fetch. use -1 to fetchall.
formatted_exception_msg:
for_download: if True, will fetch all records and reset the cursor
Returns: