# (page header and line-number gutter removed: non-source extraction artifact)
  1. # dialects/mssql/pyodbc.py
  2. # Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
  3. # <see AUTHORS file>
  4. #
  5. # This module is part of SQLAlchemy and is released under
  6. # the MIT License: https://www.opensource.org/licenses/mit-license.php
  7. r"""
  8. .. dialect:: mssql+pyodbc
  9. :name: PyODBC
  10. :dbapi: pyodbc
  11. :connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
  12. :url: https://pypi.org/project/pyodbc/
  13. Connecting to PyODBC
  14. --------------------
  15. The URL here is to be translated to PyODBC connection strings, as
  16. detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.
  17. DSN Connections
  18. ^^^^^^^^^^^^^^^
  19. A DSN connection in ODBC means that a pre-existing ODBC datasource is
  20. configured on the client machine. The application then specifies the name
  21. of this datasource, which encompasses details such as the specific ODBC driver
  22. in use as well as the network address of the database. Assuming a datasource
  23. is configured on the client, a basic DSN-based connection looks like::
  24. engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")
  25. Which above, will pass the following connection string to PyODBC::
  26. DSN=some_dsn;UID=scott;PWD=tiger
  27. If the username and password are omitted, the DSN form will also add
  28. the ``Trusted_Connection=yes`` directive to the ODBC string.
  29. Hostname Connections
  30. ^^^^^^^^^^^^^^^^^^^^
  31. Hostname-based connections are also supported by pyodbc. These are often
  32. easier to use than a DSN and have the additional advantage that the specific
  33. database name to connect towards may be specified locally in the URL, rather
  34. than it being fixed as part of a datasource configuration.
  35. When using a hostname connection, the driver name must also be specified in the
  36. query parameters of the URL. As these names usually have spaces in them, the
  37. name must be URL encoded which means using plus signs for spaces::
  38. engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=ODBC+Driver+17+for+SQL+Server")
  39. The ``driver`` keyword is significant to the pyodbc dialect and must be
  40. specified in lowercase.
  41. Any other names passed in the query string are passed through in the pyodbc
  42. connect string, such as ``authentication``, ``TrustServerCertificate``, etc.
  43. Multiple keyword arguments must be separated by an ampersand (``&``); these
  44. will be translated to semicolons when the pyodbc connect string is generated
  45. internally::
  46. e = create_engine(
  47. "mssql+pyodbc://scott:tiger@mssql2017:1433/test?"
  48. "driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes"
  49. "&authentication=ActiveDirectoryIntegrated"
  50. )
  51. The equivalent URL can be constructed using :class:`_sa.engine.URL`::
  52. from sqlalchemy.engine import URL
  53. connection_url = URL.create(
  54. "mssql+pyodbc",
  55. username="scott",
  56. password="tiger",
  57. host="mssql2017",
  58. port=1433,
  59. database="test",
  60. query={
  61. "driver": "ODBC Driver 18 for SQL Server",
  62. "TrustServerCertificate": "yes",
  63. "authentication": "ActiveDirectoryIntegrated",
  64. },
  65. )
  66. Pass through exact Pyodbc string
  67. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  68. A PyODBC connection string can also be sent in pyodbc's format directly, as
  69. specified in `the PyODBC documentation
  70. <https://github.com/mkleehammer/pyodbc/wiki/Connecting-to-databases>`_,
  71. using the parameter ``odbc_connect``. A :class:`_sa.engine.URL` object
  72. can help make this easier::
  73. from sqlalchemy.engine import URL
  74. connection_string = "DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password"
  75. connection_url = URL.create("mssql+pyodbc", query={"odbc_connect": connection_string})
  76. engine = create_engine(connection_url)
  77. .. _mssql_pyodbc_access_tokens:
  78. Connecting to databases with access tokens
  79. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  80. Some database servers are set up to only accept access tokens for login. For
  81. example, SQL Server allows the use of Azure Active Directory tokens to connect
  82. to databases. This requires creating a credential object using the
  83. ``azure-identity`` library. More information about the authentication step can be
  84. found in `Microsoft's documentation
  85. <https://docs.microsoft.com/en-us/azure/developer/python/azure-sdk-authenticate?tabs=bash>`_.
  86. After getting an engine, the credentials need to be sent to ``pyodbc.connect``
  87. each time a connection is requested. One way to do this is to set up an event
  88. listener on the engine that adds the credential token to the dialect's connect
  89. call. This is discussed more generally in :ref:`engines_dynamic_tokens`. For
  90. SQL Server in particular, this is passed as an ODBC connection attribute with
  91. a data structure `described by Microsoft
  92. <https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory#authenticating-with-an-access-token>`_.
  93. The following code snippet will create an engine that connects to an Azure SQL
  94. database using Azure credentials::
  95. import struct
  96. from sqlalchemy import create_engine, event
  97. from sqlalchemy.engine.url import URL
  98. from azure import identity
  99. SQL_COPT_SS_ACCESS_TOKEN = 1256 # Connection option for access tokens, as defined in msodbcsql.h
  100. TOKEN_URL = "https://database.windows.net/" # The token URL for any Azure SQL database
  101. connection_string = "mssql+pyodbc://@my-server.database.windows.net/myDb?driver=ODBC+Driver+17+for+SQL+Server"
  102. engine = create_engine(connection_string)
  103. azure_credentials = identity.DefaultAzureCredential()
  104. @event.listens_for(engine, "do_connect")
  105. def provide_token(dialect, conn_rec, cargs, cparams):
  106. # remove the "Trusted_Connection" parameter that SQLAlchemy adds
  107. cargs[0] = cargs[0].replace(";Trusted_Connection=Yes", "")
  108. # create token credential
  109. raw_token = azure_credentials.get_token(TOKEN_URL).token.encode("utf-16-le")
  110. token_struct = struct.pack(f"<I{len(raw_token)}s", len(raw_token), raw_token)
  111. # apply it to keyword arguments
  112. cparams["attrs_before"] = {SQL_COPT_SS_ACCESS_TOKEN: token_struct}
  113. .. tip::
  114. The ``Trusted_Connection`` token is currently added by the SQLAlchemy
  115. pyodbc dialect when no username or password is present. This needs
  116. to be removed per Microsoft's
  117. `documentation for Azure access tokens
  118. <https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory#authenticating-with-an-access-token>`_,
  119. stating that a connection string when using an access token must not contain
  120. ``UID``, ``PWD``, ``Authentication`` or ``Trusted_Connection`` parameters.
  121. .. _azure_synapse_ignore_no_transaction_on_rollback:
  122. Avoiding transaction-related exceptions on Azure Synapse Analytics
  123. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  124. Azure Synapse Analytics has a significant difference in its transaction
  125. handling compared to plain SQL Server; in some cases an error within a Synapse
  126. transaction can cause it to be arbitrarily terminated on the server side, which
  127. then causes the DBAPI ``.rollback()`` method (as well as ``.commit()``) to
  128. fail. The issue prevents the usual DBAPI contract of allowing ``.rollback()``
  129. to pass silently if no transaction is present as the driver does not expect
  130. this condition. The symptom of this failure is an exception with a message
  131. resembling 'No corresponding transaction found. (111214)' when attempting to
  132. emit a ``.rollback()`` after an operation had a failure of some kind.
  133. This specific case can be handled by passing ``ignore_no_transaction_on_rollback=True`` to
  134. the SQL Server dialect via the :func:`_sa.create_engine` function as follows::
  135. engine = create_engine(connection_url, ignore_no_transaction_on_rollback=True)
  136. Using the above parameter, the dialect will catch ``ProgrammingError``
  137. exceptions raised during ``connection.rollback()`` and emit a warning
  138. if the error message contains code ``111214``, however will not raise
  139. an exception.
  140. .. versionadded:: 1.4.40 Added the
  141. ``ignore_no_transaction_on_rollback=True`` parameter.
  142. Enable autocommit for Azure SQL Data Warehouse (DW) connections
  143. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  144. Azure SQL Data Warehouse does not support transactions,
  145. and that can cause problems with SQLAlchemy's "autobegin" (and implicit
  146. commit/rollback) behavior. We can avoid these problems by enabling autocommit
  147. at both the pyodbc and engine levels::
  148. connection_url = sa.engine.URL.create(
  149. "mssql+pyodbc",
  150. username="scott",
  151. password="tiger",
  152. host="dw.azure.example.com",
  153. database="mydb",
  154. query={
  155. "driver": "ODBC Driver 17 for SQL Server",
  156. "autocommit": "True",
  157. },
  158. )
  159. engine = create_engine(connection_url).execution_options(
  160. isolation_level="AUTOCOMMIT"
  161. )
  162. Avoiding sending large string parameters as TEXT/NTEXT
  163. ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  164. By default, for historical reasons, Microsoft's ODBC drivers for SQL Server
  165. send long string parameters (greater than 4000 SBCS characters or 2000 Unicode
  166. characters) as TEXT/NTEXT values. TEXT and NTEXT have been deprecated for many
  167. years and are starting to cause compatibility issues with newer versions of
  168. SQL_Server/Azure. For example, see `this
  169. issue <https://github.com/mkleehammer/pyodbc/issues/835>`_.
  170. Starting with ODBC Driver 18 for SQL Server we can override the legacy
  171. behavior and pass long strings as varchar(max)/nvarchar(max) using the
  172. ``LongAsMax=Yes`` connection string parameter::
  173. connection_url = sa.engine.URL.create(
  174. "mssql+pyodbc",
  175. username="scott",
  176. password="tiger",
  177. host="mssqlserver.example.com",
  178. database="mydb",
  179. query={
  180. "driver": "ODBC Driver 18 for SQL Server",
  181. "LongAsMax": "Yes",
  182. },
  183. )
  184. Pyodbc Pooling / connection close behavior
  185. ------------------------------------------
  186. PyODBC uses internal `pooling
  187. <https://github.com/mkleehammer/pyodbc/wiki/The-pyodbc-Module#pooling>`_ by
  188. default, which means connections will be longer lived than they are within
  189. SQLAlchemy itself. As SQLAlchemy has its own pooling behavior, it is often
  190. preferable to disable this behavior. This behavior can only be disabled
  191. globally at the PyODBC module level, **before** any connections are made::
  192. import pyodbc
  193. pyodbc.pooling = False
  194. # don't use the engine before pooling is set to False
  195. engine = create_engine("mssql+pyodbc://user:pass@dsn")
  196. If this variable is left at its default value of ``True``, **the application
  197. will continue to maintain active database connections**, even when the
  198. SQLAlchemy engine itself fully discards a connection or if the engine is
  199. disposed.
  200. .. seealso::
  201. `pooling <https://github.com/mkleehammer/pyodbc/wiki/The-pyodbc-Module#pooling>`_ -
  202. in the PyODBC documentation.
  203. Driver / Unicode Support
  204. -------------------------
  205. PyODBC works best with Microsoft ODBC drivers, particularly in the area
  206. of Unicode support on both Python 2 and Python 3.
  207. Using the FreeTDS ODBC drivers on Linux or OSX with PyODBC is **not**
  208. recommended; there have been historically many Unicode-related issues
  209. in this area, including before Microsoft offered ODBC drivers for Linux
  210. and OSX. Now that Microsoft offers drivers for all platforms, for
  211. PyODBC support these are recommended. FreeTDS remains relevant for
  212. non-ODBC drivers such as pymssql where it works very well.
  213. Rowcount Support
  214. ----------------
  215. Pyodbc only has partial support for rowcount. See the notes at
  216. :ref:`mssql_rowcount_versioning` for important notes when using ORM
  217. versioning.
  218. .. _mssql_pyodbc_fastexecutemany:
  219. Fast Executemany Mode
  220. ---------------------
  221. The Pyodbc driver has added support for a "fast executemany" mode of execution
  222. which greatly reduces round trips for a DBAPI ``executemany()`` call when using
  223. Microsoft ODBC drivers, for **limited size batches that fit in memory**. The
  224. feature is enabled by setting the flag ``.fast_executemany`` on the DBAPI
  225. cursor when an executemany call is to be used. The SQLAlchemy pyodbc SQL
  226. Server dialect supports setting this flag automatically when the
  227. ``.fast_executemany`` flag is passed to
  228. :func:`_sa.create_engine` ; note that the ODBC driver must be the Microsoft
  229. driver in order to use this flag::
  230. engine = create_engine(
  231. "mssql+pyodbc://scott:tiger@mssql2017:1433/test?driver=ODBC+Driver+13+for+SQL+Server",
  232. fast_executemany=True)
  233. .. warning:: The pyodbc fast_executemany mode **buffers all rows in memory** and is
  234. not compatible with very large batches of data. A future version of SQLAlchemy
  235. may support this flag as a per-execution option instead.
  236. .. versionadded:: 1.3
  237. .. seealso::
  238. `fast executemany <https://github.com/mkleehammer/pyodbc/wiki/Features-beyond-the-DB-API#fast_executemany>`_
  239. - on github
  240. .. _mssql_pyodbc_setinputsizes:
  241. Setinputsizes Support
  242. -----------------------
  243. The pyodbc ``cursor.setinputsizes()`` method can be used if necessary. To
  244. enable this hook, pass ``use_setinputsizes=True`` to :func:`_sa.create_engine`::
  245. engine = create_engine("mssql+pyodbc://...", use_setinputsizes=True)
  246. The behavior of the hook can then be customized, as may be necessary
  247. particularly if fast_executemany is in use, via the
  248. :meth:`.DialectEvents.do_setinputsizes` hook. See that method for usage
  249. examples.
  250. .. versionchanged:: 1.4.1 The pyodbc dialects will not use setinputsizes
  251. unless ``use_setinputsizes=True`` is passed.
  252. """ # noqa
  253. import datetime
  254. import decimal
  255. import re
  256. import struct
  257. from .base import BINARY
  258. from .base import DATETIMEOFFSET
  259. from .base import MSDialect
  260. from .base import MSExecutionContext
  261. from .base import VARBINARY
  262. from ... import exc
  263. from ... import types as sqltypes
  264. from ... import util
  265. from ...connectors.pyodbc import PyODBCConnector
  266. class _ms_numeric_pyodbc(object):
  267. """Turns Decimals with adjusted() < 0 or > 7 into strings.
  268. The routines here are needed for older pyodbc versions
  269. as well as current mxODBC versions.
  270. """
  271. def bind_processor(self, dialect):
  272. super_process = super(_ms_numeric_pyodbc, self).bind_processor(dialect)
  273. if not dialect._need_decimal_fix:
  274. return super_process
  275. def process(value):
  276. if self.asdecimal and isinstance(value, decimal.Decimal):
  277. adjusted = value.adjusted()
  278. if adjusted < 0:
  279. return self._small_dec_to_string(value)
  280. elif adjusted > 7:
  281. return self._large_dec_to_string(value)
  282. if super_process:
  283. return super_process(value)
  284. else:
  285. return value
  286. return process
  287. # these routines needed for older versions of pyodbc.
  288. # as of 2.1.8 this logic is integrated.
  289. def _small_dec_to_string(self, value):
  290. return "%s0.%s%s" % (
  291. (value < 0 and "-" or ""),
  292. "0" * (abs(value.adjusted()) - 1),
  293. "".join([str(nint) for nint in value.as_tuple()[1]]),
  294. )
  295. def _large_dec_to_string(self, value):
  296. _int = value.as_tuple()[1]
  297. if "E" in str(value):
  298. result = "%s%s%s" % (
  299. (value < 0 and "-" or ""),
  300. "".join([str(s) for s in _int]),
  301. "0" * (value.adjusted() - (len(_int) - 1)),
  302. )
  303. else:
  304. if (len(_int) - 1) > value.adjusted():
  305. result = "%s%s.%s" % (
  306. (value < 0 and "-" or ""),
  307. "".join([str(s) for s in _int][0 : value.adjusted() + 1]),
  308. "".join([str(s) for s in _int][value.adjusted() + 1 :]),
  309. )
  310. else:
  311. result = "%s%s" % (
  312. (value < 0 and "-" or ""),
  313. "".join([str(s) for s in _int][0 : value.adjusted() + 1]),
  314. )
  315. return result
class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
    """Numeric type with the pyodbc decimal-to-string workaround applied."""

    pass
class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
    """Float type with the pyodbc decimal-to-string workaround applied."""

    pass
  320. class _ms_binary_pyodbc(object):
  321. """Wraps binary values in dialect-specific Binary wrapper.
  322. If the value is null, return a pyodbc-specific BinaryNull
  323. object to prevent pyODBC [and FreeTDS] from defaulting binary
  324. NULL types to SQLWCHAR and causing implicit conversion errors.
  325. """
  326. def bind_processor(self, dialect):
  327. if dialect.dbapi is None:
  328. return None
  329. DBAPIBinary = dialect.dbapi.Binary
  330. def process(value):
  331. if value is not None:
  332. return DBAPIBinary(value)
  333. else:
  334. # pyodbc-specific
  335. return dialect.dbapi.BinaryNull
  336. return process
  337. class _ODBCDateTimeBindProcessor(object):
  338. """Add bind processors to handle datetimeoffset behaviors"""
  339. has_tz = False
  340. def bind_processor(self, dialect):
  341. def process(value):
  342. if value is None:
  343. return None
  344. elif isinstance(value, util.string_types):
  345. # if a string was passed directly, allow it through
  346. return value
  347. elif not value.tzinfo or (not self.timezone and not self.has_tz):
  348. # for DateTime(timezone=False)
  349. return value
  350. else:
  351. # for DATETIMEOFFSET or DateTime(timezone=True)
  352. #
  353. # Convert to string format required by T-SQL
  354. dto_string = value.strftime("%Y-%m-%d %H:%M:%S.%f %z")
  355. # offset needs a colon, e.g., -0700 -> -07:00
  356. # "UTC offset in the form (+-)HHMM[SS[.ffffff]]"
  357. # backend currently rejects seconds / fractional seconds
  358. dto_string = re.sub(
  359. r"([\+\-]\d{2})([\d\.]+)$", r"\1:\2", dto_string
  360. )
  361. return dto_string
  362. return process
class _ODBCDateTime(_ODBCDateTimeBindProcessor, sqltypes.DateTime):
    """DateTime type using the datetimeoffset-aware bind processor."""

    pass
class _ODBCDATETIMEOFFSET(_ODBCDateTimeBindProcessor, DATETIMEOFFSET):
    # DATETIMEOFFSET columns always render the tz-aware string form
    has_tz = True
class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY):
    """VARBINARY type with the pyodbc BinaryNull wrapping applied."""

    pass
class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY):
    """BINARY type with the pyodbc BinaryNull wrapping applied."""

    pass
class MSExecutionContext_pyodbc(MSExecutionContext):
    # set True when "; select scope_identity()" has been appended to the
    # INSERT statement, so post_exec() knows to fetch the identity value
    _embedded_scope_identity = False

    def pre_exec(self):
        """where appropriate, issue "select scope_identity()" in the same
        statement.

        Background on why "scope_identity()" is preferable to "@@identity":
        https://msdn.microsoft.com/en-us/library/ms190315.aspx

        Background on why we attempt to embed "scope_identity()" into the same
        statement as the INSERT:
        https://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values?
        """
        super(MSExecutionContext_pyodbc, self).pre_exec()

        # don't embed the scope_identity select into an
        # "INSERT .. DEFAULT VALUES"
        if (
            self._select_lastrowid
            and self.dialect.use_scope_identity
            and len(self.parameters[0])
        ):
            self._embedded_scope_identity = True

            self.statement += "; select scope_identity()"

    def post_exec(self):
        if self._embedded_scope_identity:
            # Fetch the last inserted id from the manipulated statement
            # We may have to skip over a number of result sets with
            # no data (due to triggers, etc.)
            while True:
                try:
                    # fetchall() ensures the cursor is consumed
                    # without closing it (FreeTDS particularly)
                    row = self.cursor.fetchall()[0]
                    break
                except self.dialect.dbapi.Error:
                    # no way around this - nextset() consumes the previous set
                    # so we need to just keep flipping
                    self.cursor.nextset()

            self._lastrowid = int(row[0])
        else:
            super(MSExecutionContext_pyodbc, self).post_exec()
class MSDialect_pyodbc(PyODBCConnector, MSDialect):
    """SQL Server dialect implemented on top of the pyodbc DBAPI."""

    supports_statement_cache = True

    # mssql still has problems with this on Linux
    supports_sane_rowcount_returning = False

    execution_ctx_cls = MSExecutionContext_pyodbc

    # pyodbc-specific type overrides layered onto the base MSDialect map
    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pyodbc,
            sqltypes.Float: _MSFloat_pyodbc,
            BINARY: _BINARY_pyodbc,
            # support DateTime(timezone=True)
            sqltypes.DateTime: _ODBCDateTime,
            DATETIMEOFFSET: _ODBCDATETIMEOFFSET,
            # SQL Server dialect has a VARBINARY that is just to support
            # "deprecate_large_types" w/ VARBINARY(max), but also we must
            # handle the usual SQL standard VARBINARY
            VARBINARY: _VARBINARY_pyodbc,
            sqltypes.VARBINARY: _VARBINARY_pyodbc,
            sqltypes.LargeBinary: _VARBINARY_pyodbc,
        },
    )

    def __init__(
        self, description_encoding=None, fast_executemany=False, **params
    ):
        """Construct the pyodbc SQL Server dialect.

        :param description_encoding: legacy parameter, accepted for
         backwards compatibility.
        :param fast_executemany: when True, enables pyodbc's
         ``cursor.fast_executemany`` for executemany() calls.
        """
        # NOTE(review): the explicit ``description_encoding`` parameter above
        # captures that keyword, so this branch appears unreachable as
        # written — confirm whether the attribute should be set from the
        # parameter instead.
        if "description_encoding" in params:
            self.description_encoding = params.pop("description_encoding")
        super(MSDialect_pyodbc, self).__init__(**params)
        # embedded scope_identity retrieval needs driver nextset() support
        self.use_scope_identity = (
            self.use_scope_identity
            and self.dbapi
            and hasattr(self.dbapi.Cursor, "nextset")
        )
        # pyodbc < 2.1.8 needs Decimal values massaged into strings
        self._need_decimal_fix = self.dbapi and self._dbapi_version() < (
            2,
            1,
            8,
        )
        self.fast_executemany = fast_executemany

    def _get_server_version_info(self, connection):
        """Return the server version as a tuple of ints, e.g. (15, 0, 2000, 5)."""
        try:
            # "Version of the instance of SQL Server, in the form
            # of 'major.minor.build.revision'"
            raw = connection.exec_driver_sql(
                "SELECT CAST(SERVERPROPERTY('ProductVersion') AS VARCHAR)"
            ).scalar()
        except exc.DBAPIError:
            # SQL Server docs indicate this function isn't present prior to
            # 2008.  Before we had the VARCHAR cast above, pyodbc would also
            # fail on this query.
            return super(MSDialect_pyodbc, self)._get_server_version_info(
                connection, allow_chars=False
            )
        else:
            version = []
            r = re.compile(r"[.\-]")
            for n in r.split(raw):
                try:
                    version.append(int(n))
                except ValueError:
                    # skip non-numeric segments of the version string
                    pass
            return tuple(version)

    def on_connect(self):
        """Compose the parent on_connect hook with datetimeoffset setup."""
        super_ = super(MSDialect_pyodbc, self).on_connect()

        def on_connect(conn):
            if super_ is not None:
                super_(conn)

            self._setup_timestampoffset_type(conn)

        return on_connect

    def _setup_timestampoffset_type(self, connection):
        # output converter function for datetimeoffset
        def _handle_datetimeoffset(dto_value):
            # packed binary layout: year, month, day, hour, minute, second
            # (6 x int16), fractional field (uint32, divided by 1000 below
            # to yield microseconds), tz offset hours/minutes (2 x int16)
            tup = struct.unpack("<6hI2h", dto_value)
            return datetime.datetime(
                tup[0],
                tup[1],
                tup[2],
                tup[3],
                tup[4],
                tup[5],
                tup[6] // 1000,
                util.timezone(
                    datetime.timedelta(hours=tup[7], minutes=tup[8])
                ),
            )

        odbc_SQL_SS_TIMESTAMPOFFSET = -155  # as defined in SQLNCLI.h
        connection.add_output_converter(
            odbc_SQL_SS_TIMESTAMPOFFSET, _handle_datetimeoffset
        )

    def do_executemany(self, cursor, statement, parameters, context=None):
        # opt this cursor into pyodbc's fast_executemany mode if enabled
        if self.fast_executemany:
            cursor.fast_executemany = True
        super(MSDialect_pyodbc, self).do_executemany(
            cursor, statement, parameters, context=context
        )

    def is_disconnect(self, e, connection, cursor):
        # treat well-known ODBC SQLSTATE / native error codes as disconnects
        if isinstance(e, self.dbapi.Error):
            code = e.args[0]
            if code in {
                "08S01",
                "01000",
                "01002",
                "08003",
                "08007",
                "08S02",
                "08001",
                "HYT00",
                "HY010",
                "10054",
            }:
                return True
        return super(MSDialect_pyodbc, self).is_disconnect(
            e, connection, cursor
        )
  524. dialect = MSDialect_pyodbc