From 61e50eaaf220b6a6da0e69229463465f2083ca68 Mon Sep 17 00:00:00 2001 From: Daniel Hillier Date: Sun, 8 Nov 2020 17:15:32 +1100 Subject: [PATCH] Increase max bulk_batch_size The previous limit was a max of 1000 query parameters. This is changed to a max of 1000 rows (the max allowed rows for inserting) or 2050 query parameters (MS SQL reports a max allowed of 2100 parameters but a few parameters are reserved for executing the query). --- sql_server/pyodbc/operations.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/sql_server/pyodbc/operations.py b/sql_server/pyodbc/operations.py index 64ef7e69..25e8457a 100644 --- a/sql_server/pyodbc/operations.py +++ b/sql_server/pyodbc/operations.py @@ -43,12 +43,15 @@ def bulk_batch_size(self, fields, objs): are the fields going to be inserted in the batch, the objs contains all the objects to be inserted. """ - objs_len, fields_len, max_row_values = len(objs), len(fields), 1000 - if (objs_len * fields_len) <= max_row_values: - size = objs_len - else: - size = max_row_values // fields_len - return size + fields_len = len(fields) + # MSSQL allows a query to have 2100 parameters but some parameters are + # taken up defining `NVARCHAR` parameters to store the query text and + # query parameters for the `sp_executesql` call. This should only take + # up 2 parameters but I've had this error when sending 2098 parameters. + max_query_params = 2050 + # inserts are capped at 1000 rows. Other operations do not have this + # limit. + return min(1000, max_query_params // fields_len) def bulk_insert_sql(self, fields, placeholder_rows): placeholder_rows_sql = (", ".join(row) for row in placeholder_rows)