asyncpg.py 35 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139
  1. # dialects/postgresql/asyncpg.py
  2. # Copyright (C) 2005-2024 the SQLAlchemy authors and contributors <see AUTHORS
  3. # file>
  4. #
  5. # This module is part of SQLAlchemy and is released under
  6. # the MIT License: https://www.opensource.org/licenses/mit-license.php
  7. r"""
  8. .. dialect:: postgresql+asyncpg
  9. :name: asyncpg
  10. :dbapi: asyncpg
  11. :connectstring: postgresql+asyncpg://user:password@host:port/dbname[?key=value&key=value...]
  12. :url: https://magicstack.github.io/asyncpg/
  13. The asyncpg dialect is SQLAlchemy's first Python asyncio dialect.
  14. Using a special asyncio mediation layer, the asyncpg dialect is usable
  15. as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
  16. extension package.
  17. This dialect should normally be used only with the
  18. :func:`_asyncio.create_async_engine` engine creation function::
  19. from sqlalchemy.ext.asyncio import create_async_engine
  20. engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname")
  21. The dialect can also be run as a "synchronous" dialect within the
  22. :func:`_sa.create_engine` function, which will pass "await" calls into
  23. an ad-hoc event loop. This mode of operation is of **limited use**
  24. and is for special testing scenarios only. The mode can be enabled by
  25. adding the SQLAlchemy-specific flag ``async_fallback`` to the URL
  26. in conjunction with :func:`_sa.create_engine`::
  27. # for testing purposes only; do not use in production!
  28. engine = create_engine("postgresql+asyncpg://user:pass@hostname/dbname?async_fallback=true")
  29. .. versionadded:: 1.4
  30. .. note::
  31. By default asyncpg does not decode the ``json`` and ``jsonb`` types and
  32. returns them as strings. SQLAlchemy sets default type decoder for ``json``
  33. and ``jsonb`` types using the python builtin ``json.loads`` function.
  34. The json implementation used can be changed by setting the attribute
  35. ``json_deserializer`` when creating the engine with
  36. :func:`create_engine` or :func:`create_async_engine`.
  37. .. _asyncpg_prepared_statement_cache:
  38. Prepared Statement Cache
  39. --------------------------
  40. The asyncpg SQLAlchemy dialect makes use of ``asyncpg.connection.prepare()``
  41. for all statements. The prepared statement objects are cached after
  42. construction which appears to grant a 10% or more performance improvement for
  43. statement invocation. The cache is on a per-DBAPI connection basis, which
  44. means that the primary storage for prepared statements is within DBAPI
  45. connections pooled within the connection pool. The size of this cache
  46. defaults to 100 statements per DBAPI connection and may be adjusted using the
  47. ``prepared_statement_cache_size`` DBAPI argument (note that while this argument
  48. is implemented by SQLAlchemy, it is part of the DBAPI emulation portion of the
  49. asyncpg dialect, therefore is handled as a DBAPI argument, not a dialect
  50. argument)::
  51. engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=500")
  52. To disable the prepared statement cache, use a value of zero::
  53. engine = create_async_engine("postgresql+asyncpg://user:pass@hostname/dbname?prepared_statement_cache_size=0")
  54. .. versionadded:: 1.4.0b2 Added ``prepared_statement_cache_size`` for asyncpg.
  55. .. warning:: The ``asyncpg`` database driver necessarily uses caches for
  56. PostgreSQL type OIDs, which become stale when custom PostgreSQL datatypes
  57. such as ``ENUM`` objects are changed via DDL operations. Additionally,
  58. prepared statements themselves which are optionally cached by SQLAlchemy's
  59. driver as described above may also become "stale" when DDL has been emitted
  60. to the PostgreSQL database which modifies the tables or other objects
  61. involved in a particular prepared statement.
  62. The SQLAlchemy asyncpg dialect will invalidate these caches within its local
  63. process when statements that represent DDL are emitted on a local
  64. connection, but this is only controllable within a single Python process /
  65. database engine. If DDL changes are made from other database engines
  66. and/or processes, a running application may encounter asyncpg exceptions
  67. ``InvalidCachedStatementError`` and/or ``InternalServerError("cache lookup
  68. failed for type <oid>")`` if it refers to pooled database connections which
  69. operated upon the previous structures. The SQLAlchemy asyncpg dialect will
  70. recover from these error cases when the driver raises these exceptions by
  71. clearing its internal caches as well as those of the asyncpg driver in
  72. response to them, but cannot prevent them from being raised in the first
  73. place if the cached prepared statement or asyncpg type caches have gone
  74. stale, nor can it retry the statement as the PostgreSQL transaction is
  75. invalidated when these errors occur.
  76. Disabling the PostgreSQL JIT to improve ENUM datatype handling
  77. ---------------------------------------------------------------
  78. Asyncpg has an `issue <https://github.com/MagicStack/asyncpg/issues/727>`_ when
  79. using PostgreSQL ENUM datatypes, where upon the creation of new database
  80. connections, an expensive query may be emitted in order to retrieve metadata
  81. regarding custom types which has been shown to negatively affect performance.
  82. To mitigate this issue, the PostgreSQL "jit" setting may be disabled from the
  83. client using this setting passed to :func:`_asyncio.create_async_engine`::
  84. engine = create_async_engine(
  85. "postgresql+asyncpg://user:password@localhost/tmp",
  86. connect_args={"server_settings": {"jit": "off"}},
  87. )
  88. .. seealso::
  89. https://github.com/MagicStack/asyncpg/issues/727
  90. """ # noqa
  91. import collections
  92. import decimal
  93. import json as _py_json
  94. import re
  95. import time
  96. from . import json
  97. from .base import _DECIMAL_TYPES
  98. from .base import _FLOAT_TYPES
  99. from .base import _INT_TYPES
  100. from .base import ENUM
  101. from .base import INTERVAL
  102. from .base import OID
  103. from .base import PGCompiler
  104. from .base import PGDialect
  105. from .base import PGExecutionContext
  106. from .base import PGIdentifierPreparer
  107. from .base import REGCLASS
  108. from .base import UUID
  109. from ... import exc
  110. from ... import pool
  111. from ... import processors
  112. from ... import util
  113. from ...engine import AdaptedConnection
  114. from ...sql import sqltypes
  115. from ...util.concurrency import asyncio
  116. from ...util.concurrency import await_fallback
  117. from ...util.concurrency import await_only
# stdlib uuid is expected to be present; fall back to a None sentinel so
# the module still imports if it is not.
try:
    from uuid import UUID as _python_UUID  # noqa
except ImportError:
    _python_UUID = None
  122. class AsyncpgTime(sqltypes.Time):
  123. def get_dbapi_type(self, dbapi):
  124. if self.timezone:
  125. return dbapi.TIME_W_TZ
  126. else:
  127. return dbapi.TIME
class AsyncpgDate(sqltypes.Date):
    # symbol consumed via setinputsizes() to render a "::date" cast
    def get_dbapi_type(self, dbapi):
        return dbapi.DATE
  131. class AsyncpgDateTime(sqltypes.DateTime):
  132. def get_dbapi_type(self, dbapi):
  133. if self.timezone:
  134. return dbapi.TIMESTAMP_W_TZ
  135. else:
  136. return dbapi.TIMESTAMP
class AsyncpgBoolean(sqltypes.Boolean):
    # symbol consumed via setinputsizes() to render a "::bool" cast
    def get_dbapi_type(self, dbapi):
        return dbapi.BOOLEAN
class AsyncPgInterval(INTERVAL):
    def get_dbapi_type(self, dbapi):
        return dbapi.INTERVAL

    @classmethod
    def adapt_emulated_to_native(cls, interval, **kw):
        # carry the emulated Interval's second_precision over to the
        # native PG INTERVAL type
        return AsyncPgInterval(precision=interval.second_precision)
class AsyncPgEnum(ENUM):
    # ENUM gets its own symbol; it is excluded from setinputsizes casting
    # (see PGExecutionContext_asyncpg.pre_exec)
    def get_dbapi_type(self, dbapi):
        return dbapi.ENUM
class AsyncpgInteger(sqltypes.Integer):
    # symbol consumed via setinputsizes() to render an "::integer" cast
    def get_dbapi_type(self, dbapi):
        return dbapi.INTEGER
class AsyncpgBigInteger(sqltypes.BigInteger):
    # symbol consumed via setinputsizes() to render a "::bigint" cast
    def get_dbapi_type(self, dbapi):
        return dbapi.BIGINTEGER
class AsyncpgJSON(json.JSON):
    def get_dbapi_type(self, dbapi):
        return dbapi.JSON

    def result_processor(self, dialect, coltype):
        # no result conversion needed; per the module docs the dialect
        # installs json.loads as the driver-level decoder for json
        return None
class AsyncpgJSONB(json.JSONB):
    def get_dbapi_type(self, dbapi):
        return dbapi.JSONB

    def result_processor(self, dialect, coltype):
        # no result conversion needed; per the module docs the dialect
        # installs json.loads as the driver-level decoder for jsonb
        return None
class AsyncpgJSONIndexType(sqltypes.JSON.JSONIndexType):
    def get_dbapi_type(self, dbapi):
        # only the int/str subclasses below should ever be consulted
        raise NotImplementedError("should not be here")
class AsyncpgJSONIntIndexType(sqltypes.JSON.JSONIntIndexType):
    # integer JSON index values are cast as integer
    def get_dbapi_type(self, dbapi):
        return dbapi.INTEGER
class AsyncpgJSONStrIndexType(sqltypes.JSON.JSONStrIndexType):
    # string JSON index values are cast as varchar
    def get_dbapi_type(self, dbapi):
        return dbapi.STRING
  174. class AsyncpgJSONPathType(json.JSONPathType):
  175. def bind_processor(self, dialect):
  176. def process(value):
  177. assert isinstance(value, util.collections_abc.Sequence)
  178. tokens = [util.text_type(elem) for elem in value]
  179. return tokens
  180. return process
  181. class AsyncpgUUID(UUID):
  182. def get_dbapi_type(self, dbapi):
  183. return dbapi.UUID
  184. def bind_processor(self, dialect):
  185. if not self.as_uuid and dialect.use_native_uuid:
  186. def process(value):
  187. if value is not None:
  188. value = _python_UUID(value)
  189. return value
  190. return process
  191. def result_processor(self, dialect, coltype):
  192. if not self.as_uuid and dialect.use_native_uuid:
  193. def process(value):
  194. if value is not None:
  195. value = str(value)
  196. return value
  197. return process
  198. class AsyncpgNumeric(sqltypes.Numeric):
  199. def get_dbapi_type(self, dbapi):
  200. return dbapi.NUMBER
  201. def bind_processor(self, dialect):
  202. return None
  203. def result_processor(self, dialect, coltype):
  204. if self.asdecimal:
  205. if coltype in _FLOAT_TYPES:
  206. return processors.to_decimal_processor_factory(
  207. decimal.Decimal, self._effective_decimal_return_scale
  208. )
  209. elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
  210. # pg8000 returns Decimal natively for 1700
  211. return None
  212. else:
  213. raise exc.InvalidRequestError(
  214. "Unknown PG numeric type: %d" % coltype
  215. )
  216. else:
  217. if coltype in _FLOAT_TYPES:
  218. # pg8000 returns float natively for 701
  219. return None
  220. elif coltype in _DECIMAL_TYPES or coltype in _INT_TYPES:
  221. return processors.to_float
  222. else:
  223. raise exc.InvalidRequestError(
  224. "Unknown PG numeric type: %d" % coltype
  225. )
class AsyncpgFloat(AsyncpgNumeric):
    # same result handling as AsyncpgNumeric; distinct symbol for casts
    def get_dbapi_type(self, dbapi):
        return dbapi.FLOAT
class AsyncpgREGCLASS(REGCLASS):
    # regclass values are exchanged as strings
    def get_dbapi_type(self, dbapi):
        return dbapi.STRING
class AsyncpgOID(OID):
    # OIDs are exchanged as plain integers
    def get_dbapi_type(self, dbapi):
        return dbapi.INTEGER
  235. class PGExecutionContext_asyncpg(PGExecutionContext):
  236. def handle_dbapi_exception(self, e):
  237. if isinstance(
  238. e,
  239. (
  240. self.dialect.dbapi.InvalidCachedStatementError,
  241. self.dialect.dbapi.InternalServerError,
  242. ),
  243. ):
  244. self.dialect._invalidate_schema_cache()
  245. def pre_exec(self):
  246. if self.isddl:
  247. self.dialect._invalidate_schema_cache()
  248. self.cursor._invalidate_schema_cache_asof = (
  249. self.dialect._invalidate_schema_cache_asof
  250. )
  251. if not self.compiled:
  252. return
  253. # we have to exclude ENUM because "enum" not really a "type"
  254. # we can cast to, it has to be the name of the type itself.
  255. # for now we just omit it from casting
  256. self.exclude_set_input_sizes = {AsyncAdapt_asyncpg_dbapi.ENUM}
  257. def create_server_side_cursor(self):
  258. return self._dbapi_connection.cursor(server_side=True)
class PGCompiler_asyncpg(PGCompiler):
    """SQL compiler; no asyncpg-specific compilation changes needed."""

    pass
class PGIdentifierPreparer_asyncpg(PGIdentifierPreparer):
    """Identifier preparer; no asyncpg-specific quoting changes needed."""

    pass
class AsyncAdapt_asyncpg_cursor:
    """A synchronous-looking pep-249 cursor on top of asyncpg.

    Driver coroutines are run through the adapted connection's
    ``await_`` mediation function.  For the default (non server side)
    case, all rows are fetched at execute time into ``_rows`` and the
    fetch methods serve from that list.
    """

    __slots__ = (
        "_adapt_connection",
        "_connection",
        "_rows",
        "description",
        "arraysize",
        "rowcount",
        "_inputsizes",
        "_cursor",
        "_invalidate_schema_cache_asof",
    )

    # flipped to True by AsyncAdapt_asyncpg_ss_cursor
    server_side = False

    def __init__(self, adapt_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self._rows = []
        self._cursor = None
        self.description = None
        self.arraysize = 1
        self.rowcount = -1
        self._inputsizes = None
        # timestamp of the last schema-cache invalidation this cursor
        # knows about; compared against the dialect-wide value
        self._invalidate_schema_cache_asof = 0

    def close(self):
        # discard any buffered rows; the underlying connection stays open
        self._rows[:] = []

    def _handle_exception(self, error):
        self._adapt_connection._handle_exception(error)

    def _parameter_placeholders(self, params):
        """Render asyncpg "$n" placeholders, with "::type" casts for any
        types registered via setinputsizes()."""
        if not self._inputsizes:
            return tuple("$%d" % idx for idx, _ in enumerate(params, 1))
        else:
            return tuple(
                "$%d::%s" % (idx, typ) if typ else "$%d" % idx
                for idx, typ in enumerate(
                    (_pg_types.get(typ) for typ in self._inputsizes), 1
                )
            )

    async def _prepare_and_execute(self, operation, parameters):
        adapt_connection = self._adapt_connection

        # serialize statement execution on this connection
        async with adapt_connection._execute_mutex:

            if not adapt_connection._started:
                await adapt_connection._start_transaction()

            # interpolate "$n" placeholders into the %s-style statement
            if parameters is not None:
                operation = operation % self._parameter_placeholders(
                    parameters
                )
            else:
                parameters = ()

            try:
                prepared_stmt, attributes = await adapt_connection._prepare(
                    operation, self._invalidate_schema_cache_asof
                )

                # build a pep-249 seven-tuple description from the
                # prepared statement's column attributes
                if attributes:
                    self.description = [
                        (
                            attr.name,
                            attr.type.oid,
                            None,
                            None,
                            None,
                            None,
                            None,
                        )
                        for attr in attributes
                    ]
                else:
                    self.description = None

                if self.server_side:
                    self._cursor = await prepared_stmt.cursor(*parameters)
                    self.rowcount = -1
                else:
                    self._rows = await prepared_stmt.fetch(*parameters)
                    status = prepared_stmt.get_statusmsg()

                    # parse the affected-row count out of the status tag
                    reg = re.match(
                        r"(?:SELECT|UPDATE|DELETE|INSERT \d+) (\d+)", status
                    )
                    if reg:
                        self.rowcount = int(reg.group(1))
                    else:
                        self.rowcount = -1
            except Exception as error:
                self._handle_exception(error)

    async def _executemany(self, operation, seq_of_parameters):
        adapt_connection = self._adapt_connection

        async with adapt_connection._execute_mutex:
            await adapt_connection._check_type_cache_invalidation(
                self._invalidate_schema_cache_asof
            )

            if not adapt_connection._started:
                await adapt_connection._start_transaction()

            # placeholders are derived from the first parameter set
            operation = operation % self._parameter_placeholders(
                seq_of_parameters[0]
            )

            try:
                return await self._connection.executemany(
                    operation, seq_of_parameters
                )
            except Exception as error:
                self._handle_exception(error)

    def execute(self, operation, parameters=None):
        self._adapt_connection.await_(
            self._prepare_and_execute(operation, parameters)
        )

    def executemany(self, operation, seq_of_parameters):
        return self._adapt_connection.await_(
            self._executemany(operation, seq_of_parameters)
        )

    def setinputsizes(self, *inputsizes):
        # stored for _parameter_placeholders() to render "::type" casts
        self._inputsizes = inputsizes

    def __iter__(self):
        while self._rows:
            yield self._rows.pop(0)

    def fetchone(self):
        if self._rows:
            return self._rows.pop(0)
        else:
            return None

    def fetchmany(self, size=None):
        if size is None:
            size = self.arraysize

        retval = self._rows[0:size]
        self._rows[:] = self._rows[size:]
        return retval

    def fetchall(self):
        retval = self._rows[:]
        self._rows[:] = []
        return retval
class AsyncAdapt_asyncpg_ss_cursor(AsyncAdapt_asyncpg_cursor):
    """Server-side variant: rows are streamed from an asyncpg cursor in
    batches into a deque rather than fully fetched at execute time."""

    server_side = True
    __slots__ = ("_rowbuffer",)

    def __init__(self, adapt_connection):
        super(AsyncAdapt_asyncpg_ss_cursor, self).__init__(adapt_connection)
        self._rowbuffer = None

    def close(self):
        self._cursor = None
        self._rowbuffer = None

    def _buffer_rows(self):
        # pull the next batch of 50 rows from the asyncpg cursor
        new_rows = self._adapt_connection.await_(self._cursor.fetch(50))
        self._rowbuffer = collections.deque(new_rows)

    def __aiter__(self):
        return self

    async def __anext__(self):
        # NOTE(review): because this body contains ``yield`` it is an
        # async generator, so calling __anext__() returns a generator
        # object rather than awaiting one row — confirm the async
        # iteration layer upstream accounts for this.
        if not self._rowbuffer:
            self._buffer_rows()

        while True:
            while self._rowbuffer:
                yield self._rowbuffer.popleft()

            self._buffer_rows()
            if not self._rowbuffer:
                break

    def fetchone(self):
        if not self._rowbuffer:
            self._buffer_rows()
        if not self._rowbuffer:
            return None
        return self._rowbuffer.popleft()

    def fetchmany(self, size=None):
        if size is None:
            return self.fetchall()

        if not self._rowbuffer:
            self._buffer_rows()

        # serve from the buffer first, then top up from the driver
        buf = list(self._rowbuffer)
        lb = len(buf)
        if size > lb:
            buf.extend(
                self._adapt_connection.await_(self._cursor.fetch(size - lb))
            )

        result = buf[0:size]
        self._rowbuffer = collections.deque(buf[size:])
        return result

    def fetchall(self):
        ret = list(self._rowbuffer) + list(
            self._adapt_connection.await_(self._all())
        )
        self._rowbuffer.clear()
        return ret

    async def _all(self):
        rows = []

        # TODO: looks like we have to hand-roll some kind of batching here.
        # hardcoding for the moment but this should be improved.
        while True:
            batch = await self._cursor.fetch(1000)
            if batch:
                rows.extend(batch)
                continue
            else:
                break
        return rows

    def executemany(self, operation, seq_of_parameters):
        raise NotImplementedError(
            "server side cursor doesn't support executemany yet"
        )
class AsyncAdapt_asyncpg_connection(AdaptedConnection):
    """Adapts an asyncpg connection to the pep-249 connection interface.

    A transaction is begun implicitly on first statement execution and
    ended by explicit ``commit()`` / ``rollback()``, matching DBAPI
    expectations.  Prepared statements are cached per connection in an
    LRU cache sized by ``prepared_statement_cache_size``.
    """

    __slots__ = (
        "dbapi",
        "_connection",
        "isolation_level",
        "_isolation_setting",
        "readonly",
        "deferrable",
        "_transaction",
        "_started",
        "_prepared_statement_cache",
        "_invalidate_schema_cache_asof",
        "_execute_mutex",
    )

    # coroutine mediation; the fallback subclass swaps in await_fallback
    await_ = staticmethod(await_only)

    def __init__(self, dbapi, connection, prepared_statement_cache_size=100):
        self.dbapi = dbapi
        self._connection = connection
        self.isolation_level = self._isolation_setting = "read_committed"
        self.readonly = False
        self.deferrable = False
        self._transaction = None
        self._started = False
        self._invalidate_schema_cache_asof = time.time()
        self._execute_mutex = asyncio.Lock()

        if prepared_statement_cache_size:
            self._prepared_statement_cache = util.LRUCache(
                prepared_statement_cache_size
            )
        else:
            # caching disabled via prepared_statement_cache_size=0
            self._prepared_statement_cache = None

    async def _check_type_cache_invalidation(self, invalidate_timestamp):
        # if DDL was emitted after this connection last reloaded its
        # schema state, have asyncpg refresh its type caches
        if invalidate_timestamp > self._invalidate_schema_cache_asof:
            await self._connection.reload_schema_state()
            self._invalidate_schema_cache_asof = invalidate_timestamp

    async def _prepare(self, operation, invalidate_timestamp):
        """Return a (prepared_statement, attributes) pair, using the
        per-connection cache when enabled and not stale."""
        await self._check_type_cache_invalidation(invalidate_timestamp)

        cache = self._prepared_statement_cache
        if cache is None:
            prepared_stmt = await self._connection.prepare(operation)
            attributes = prepared_stmt.get_attributes()
            return prepared_stmt, attributes

        # asyncpg uses a type cache for the "attributes" which seems to go
        # stale independently of the PreparedStatement itself, so place that
        # collection in the cache as well.
        if operation in cache:
            prepared_stmt, attributes, cached_timestamp = cache[operation]

            # prepared statements themselves also go stale for certain DDL
            # changes such as size of a VARCHAR changing, so there is also
            # a cross-connection invalidation timestamp
            if cached_timestamp > invalidate_timestamp:
                return prepared_stmt, attributes

        prepared_stmt = await self._connection.prepare(operation)
        attributes = prepared_stmt.get_attributes()
        cache[operation] = (prepared_stmt, attributes, time.time())

        return prepared_stmt, attributes

    def _handle_exception(self, error):
        """Translate an asyncpg exception into the emulated pep-249
        hierarchy and re-raise; always raises."""
        # a closed underlying connection means any in-progress
        # transaction is gone
        if self._connection.is_closed():
            self._transaction = None
            self._started = False

        if not isinstance(error, AsyncAdapt_asyncpg_dbapi.Error):
            exception_mapping = self.dbapi._asyncpg_error_translate

            # walk the error's MRO looking for a registered translation;
            # the for/else re-raises unchanged if none matches
            for super_ in type(error).__mro__:
                if super_ in exception_mapping:
                    translated_error = exception_mapping[super_](
                        "%s: %s" % (type(error), error)
                    )
                    translated_error.pgcode = (
                        translated_error.sqlstate
                    ) = getattr(error, "sqlstate", None)
                    raise translated_error from error
            else:
                raise error
        else:
            raise error

    @property
    def autocommit(self):
        return self.isolation_level == "autocommit"

    @autocommit.setter
    def autocommit(self, value):
        if value:
            self.isolation_level = "autocommit"
        else:
            # restore the isolation level in effect before autocommit
            self.isolation_level = self._isolation_setting

    def set_isolation_level(self, level):
        if self._started:
            self.rollback()
        self.isolation_level = self._isolation_setting = level

    async def _start_transaction(self):
        # autocommit mode runs with no explicit transaction
        if self.isolation_level == "autocommit":
            return

        try:
            self._transaction = self._connection.transaction(
                isolation=self.isolation_level,
                readonly=self.readonly,
                deferrable=self.deferrable,
            )
            await self._transaction.start()
        except Exception as error:
            self._handle_exception(error)
        else:
            self._started = True

    def cursor(self, server_side=False):
        if server_side:
            return AsyncAdapt_asyncpg_ss_cursor(self)
        else:
            return AsyncAdapt_asyncpg_cursor(self)

    async def _rollback_and_discard(self):
        try:
            await self._transaction.rollback()
        finally:
            # if asyncpg .rollback() was actually called, then whether or
            # not it raised or succeeded, the transaction is done, discard it
            self._transaction = None
            self._started = False

    async def _commit_and_discard(self):
        try:
            await self._transaction.commit()
        finally:
            # if asyncpg .commit() was actually called, then whether or
            # not it raised or succeeded, the transaction is done, discard it
            self._transaction = None
            self._started = False

    def rollback(self):
        if self._started:
            try:
                self.await_(self._rollback_and_discard())

                self._transaction = None
                self._started = False
            except Exception as error:
                # don't dereference asyncpg transaction if we didn't
                # actually try to call rollback() on it
                self._handle_exception(error)

    def commit(self):
        if self._started:
            try:
                self.await_(self._commit_and_discard())
                self._transaction = None
                self._started = False
            except Exception as error:
                # don't dereference asyncpg transaction if we didn't
                # actually try to call commit() on it
                self._handle_exception(error)

    def close(self):
        # roll back any open transaction, then close gracefully
        self.rollback()

        self.await_(self._connection.close())

    def terminate(self):
        # immediate, non-graceful shutdown of the driver connection
        self._connection.terminate()
class AsyncAdaptFallback_asyncpg_connection(AsyncAdapt_asyncpg_connection):
    """Connection variant for the ``async_fallback`` mode, which runs
    awaits through an ad-hoc event loop for synchronous use."""

    __slots__ = ()

    await_ = staticmethod(await_fallback)
class AsyncAdapt_asyncpg_dbapi:
    """Emulates a pep-249 DBAPI module on top of the asyncpg driver."""

    def __init__(self, asyncpg):
        self.asyncpg = asyncpg
        self.paramstyle = "format"

    def connect(self, *arg, **kw):
        """Create an adapted connection; the SQLAlchemy-level
        ``async_fallback`` and ``prepared_statement_cache_size``
        arguments are consumed here, the rest pass to
        ``asyncpg.connect()``."""
        async_fallback = kw.pop("async_fallback", False)
        prepared_statement_cache_size = kw.pop(
            "prepared_statement_cache_size", 100
        )

        if util.asbool(async_fallback):
            return AsyncAdaptFallback_asyncpg_connection(
                self,
                await_fallback(self.asyncpg.connect(*arg, **kw)),
                prepared_statement_cache_size=prepared_statement_cache_size,
            )
        else:
            return AsyncAdapt_asyncpg_connection(
                self,
                await_only(self.asyncpg.connect(*arg, **kw)),
                prepared_statement_cache_size=prepared_statement_cache_size,
            )

    # pep-249 exception hierarchy, used as translation targets for
    # asyncpg exceptions (see _asyncpg_error_translate)
    class Error(Exception):
        pass

    class Warning(Exception):  # noqa
        pass

    class InterfaceError(Error):
        pass

    class DatabaseError(Error):
        pass

    class InternalError(DatabaseError):
        pass

    class OperationalError(DatabaseError):
        pass

    class ProgrammingError(DatabaseError):
        pass

    class IntegrityError(DatabaseError):
        pass

    class DataError(DatabaseError):
        pass

    class NotSupportedError(DatabaseError):
        pass

    class InternalServerError(InternalError):
        pass

    class InvalidCachedStatementError(NotSupportedError):
        def __init__(self, message):
            super(
                AsyncAdapt_asyncpg_dbapi.InvalidCachedStatementError, self
            ).__init__(
                message + " (SQLAlchemy asyncpg dialect will now invalidate "
                "all prepared caches in response to this exception)",
            )

    @util.memoized_property
    def _asyncpg_error_translate(self):
        # maps asyncpg exception classes (looked up along each raised
        # error's MRO) to the emulated pep-249 classes above
        import asyncpg

        return {
            asyncpg.exceptions.IntegrityConstraintViolationError: self.IntegrityError,  # noqa: E501
            asyncpg.exceptions.PostgresError: self.Error,
            asyncpg.exceptions.SyntaxOrAccessError: self.ProgrammingError,
            asyncpg.exceptions.InterfaceError: self.InterfaceError,
            asyncpg.exceptions.InvalidCachedStatementError: self.InvalidCachedStatementError,  # noqa: E501
            asyncpg.exceptions.InternalServerError: self.InternalServerError,
        }

    def Binary(self, value):
        # asyncpg accepts bytes values directly; no wrapper needed
        return value

    # type-object symbols returned by the Asyncpg* types'
    # get_dbapi_type() and consumed via cursor.setinputsizes()
    STRING = util.symbol("STRING")
    TIMESTAMP = util.symbol("TIMESTAMP")
    TIMESTAMP_W_TZ = util.symbol("TIMESTAMP_W_TZ")
    TIME = util.symbol("TIME")
    TIME_W_TZ = util.symbol("TIME_W_TZ")
    DATE = util.symbol("DATE")
    INTERVAL = util.symbol("INTERVAL")
    NUMBER = util.symbol("NUMBER")
    FLOAT = util.symbol("FLOAT")
    BOOLEAN = util.symbol("BOOLEAN")
    INTEGER = util.symbol("INTEGER")
    BIGINTEGER = util.symbol("BIGINTEGER")
    BYTES = util.symbol("BYTES")
    DECIMAL = util.symbol("DECIMAL")
    JSON = util.symbol("JSON")
    JSONB = util.symbol("JSONB")
    ENUM = util.symbol("ENUM")
    UUID = util.symbol("UUID")
    BYTEA = util.symbol("BYTEA")

    # pep-249 aliases
    DATETIME = TIMESTAMP
    BINARY = BYTEA
# maps the setinputsizes() symbols on AsyncAdapt_asyncpg_dbapi to the
# PostgreSQL type names rendered as "$n::<name>" casts by
# AsyncAdapt_asyncpg_cursor._parameter_placeholders()
_pg_types = {
    AsyncAdapt_asyncpg_dbapi.STRING: "varchar",
    AsyncAdapt_asyncpg_dbapi.TIMESTAMP: "timestamp",
    AsyncAdapt_asyncpg_dbapi.TIMESTAMP_W_TZ: "timestamp with time zone",
    AsyncAdapt_asyncpg_dbapi.DATE: "date",
    AsyncAdapt_asyncpg_dbapi.TIME: "time",
    AsyncAdapt_asyncpg_dbapi.TIME_W_TZ: "time with time zone",
    AsyncAdapt_asyncpg_dbapi.INTERVAL: "interval",
    AsyncAdapt_asyncpg_dbapi.NUMBER: "numeric",
    AsyncAdapt_asyncpg_dbapi.FLOAT: "float",
    AsyncAdapt_asyncpg_dbapi.BOOLEAN: "bool",
    AsyncAdapt_asyncpg_dbapi.INTEGER: "integer",
    AsyncAdapt_asyncpg_dbapi.BIGINTEGER: "bigint",
    AsyncAdapt_asyncpg_dbapi.BYTES: "bytes",
    AsyncAdapt_asyncpg_dbapi.DECIMAL: "decimal",
    AsyncAdapt_asyncpg_dbapi.JSON: "json",
    AsyncAdapt_asyncpg_dbapi.JSONB: "jsonb",
    AsyncAdapt_asyncpg_dbapi.ENUM: "enum",
    AsyncAdapt_asyncpg_dbapi.UUID: "uuid",
    AsyncAdapt_asyncpg_dbapi.BYTEA: "bytea",
}
  712. class PGDialect_asyncpg(PGDialect):
  713. driver = "asyncpg"
  714. supports_statement_cache = True
  715. supports_unicode_statements = True
  716. supports_server_side_cursors = True
  717. supports_unicode_binds = True
  718. has_terminate = True
  719. default_paramstyle = "format"
  720. supports_sane_multi_rowcount = False
  721. execution_ctx_cls = PGExecutionContext_asyncpg
  722. statement_compiler = PGCompiler_asyncpg
  723. preparer = PGIdentifierPreparer_asyncpg
  724. use_setinputsizes = True
  725. use_native_uuid = True
  726. colspecs = util.update_copy(
  727. PGDialect.colspecs,
  728. {
  729. sqltypes.Time: AsyncpgTime,
  730. sqltypes.Date: AsyncpgDate,
  731. sqltypes.DateTime: AsyncpgDateTime,
  732. sqltypes.Interval: AsyncPgInterval,
  733. INTERVAL: AsyncPgInterval,
  734. UUID: AsyncpgUUID,
  735. sqltypes.Boolean: AsyncpgBoolean,
  736. sqltypes.Integer: AsyncpgInteger,
  737. sqltypes.BigInteger: AsyncpgBigInteger,
  738. sqltypes.Numeric: AsyncpgNumeric,
  739. sqltypes.Float: AsyncpgFloat,
  740. sqltypes.JSON: AsyncpgJSON,
  741. json.JSONB: AsyncpgJSONB,
  742. sqltypes.JSON.JSONPathType: AsyncpgJSONPathType,
  743. sqltypes.JSON.JSONIndexType: AsyncpgJSONIndexType,
  744. sqltypes.JSON.JSONIntIndexType: AsyncpgJSONIntIndexType,
  745. sqltypes.JSON.JSONStrIndexType: AsyncpgJSONStrIndexType,
  746. sqltypes.Enum: AsyncPgEnum,
  747. OID: AsyncpgOID,
  748. REGCLASS: AsyncpgREGCLASS,
  749. },
  750. )
  751. is_async = True
  752. _invalidate_schema_cache_asof = 0
  753. def _invalidate_schema_cache(self):
  754. self._invalidate_schema_cache_asof = time.time()
  755. @util.memoized_property
  756. def _dbapi_version(self):
  757. if self.dbapi and hasattr(self.dbapi, "__version__"):
  758. return tuple(
  759. [
  760. int(x)
  761. for x in re.findall(
  762. r"(\d+)(?:[-\.]?|$)", self.dbapi.__version__
  763. )
  764. ]
  765. )
  766. else:
  767. return (99, 99, 99)
  768. @classmethod
  769. def dbapi(cls):
  770. return AsyncAdapt_asyncpg_dbapi(__import__("asyncpg"))
  771. @util.memoized_property
  772. def _isolation_lookup(self):
  773. return {
  774. "AUTOCOMMIT": "autocommit",
  775. "READ COMMITTED": "read_committed",
  776. "REPEATABLE READ": "repeatable_read",
  777. "SERIALIZABLE": "serializable",
  778. }
  779. def set_isolation_level(self, connection, level):
  780. try:
  781. level = self._isolation_lookup[level.replace("_", " ")]
  782. except KeyError as err:
  783. util.raise_(
  784. exc.ArgumentError(
  785. "Invalid value '%s' for isolation_level. "
  786. "Valid isolation levels for %s are %s"
  787. % (level, self.name, ", ".join(self._isolation_lookup))
  788. ),
  789. replace_context=err,
  790. )
  791. connection.set_isolation_level(level)
  792. def set_readonly(self, connection, value):
  793. connection.readonly = value
  794. def get_readonly(self, connection):
  795. return connection.readonly
  796. def set_deferrable(self, connection, value):
  797. connection.deferrable = value
  798. def get_deferrable(self, connection):
  799. return connection.deferrable
  800. def do_terminate(self, dbapi_connection) -> None:
  801. dbapi_connection.terminate()
  802. def create_connect_args(self, url):
  803. opts = url.translate_connect_args(username="user")
  804. opts.update(url.query)
  805. util.coerce_kw_type(opts, "prepared_statement_cache_size", int)
  806. util.coerce_kw_type(opts, "port", int)
  807. return ([], opts)
  808. @classmethod
  809. def get_pool_class(cls, url):
  810. async_fallback = url.query.get("async_fallback", False)
  811. if util.asbool(async_fallback):
  812. return pool.FallbackAsyncAdaptedQueuePool
  813. else:
  814. return pool.AsyncAdaptedQueuePool
  815. def is_disconnect(self, e, connection, cursor):
  816. if connection:
  817. return connection._connection.is_closed()
  818. else:
  819. return isinstance(
  820. e, self.dbapi.InterfaceError
  821. ) and "connection is closed" in str(e)
  822. def do_set_input_sizes(self, cursor, list_of_tuples, context):
  823. if self.positional:
  824. cursor.setinputsizes(
  825. *[dbtype for key, dbtype, sqltype in list_of_tuples]
  826. )
  827. else:
  828. cursor.setinputsizes(
  829. **{
  830. key: dbtype
  831. for key, dbtype, sqltype in list_of_tuples
  832. if dbtype
  833. }
  834. )
  835. async def setup_asyncpg_json_codec(self, conn):
  836. """set up JSON codec for asyncpg.
  837. This occurs for all new connections and
  838. can be overridden by third party dialects.
  839. .. versionadded:: 1.4.27
  840. """
  841. asyncpg_connection = conn._connection
  842. deserializer = self._json_deserializer or _py_json.loads
  843. def _json_decoder(bin_value):
  844. return deserializer(bin_value.decode())
  845. await asyncpg_connection.set_type_codec(
  846. "json",
  847. encoder=str.encode,
  848. decoder=_json_decoder,
  849. schema="pg_catalog",
  850. format="binary",
  851. )
  852. async def setup_asyncpg_jsonb_codec(self, conn):
  853. """set up JSONB codec for asyncpg.
  854. This occurs for all new connections and
  855. can be overridden by third party dialects.
  856. .. versionadded:: 1.4.27
  857. """
  858. asyncpg_connection = conn._connection
  859. deserializer = self._json_deserializer or _py_json.loads
  860. def _jsonb_encoder(str_value):
  861. # \x01 is the prefix for jsonb used by PostgreSQL.
  862. # asyncpg requires it when format='binary'
  863. return b"\x01" + str_value.encode()
  864. deserializer = self._json_deserializer or _py_json.loads
  865. def _jsonb_decoder(bin_value):
  866. # the byte is the \x01 prefix for jsonb used by PostgreSQL.
  867. # asyncpg returns it when format='binary'
  868. return deserializer(bin_value[1:].decode())
  869. await asyncpg_connection.set_type_codec(
  870. "jsonb",
  871. encoder=_jsonb_encoder,
  872. decoder=_jsonb_decoder,
  873. schema="pg_catalog",
  874. format="binary",
  875. )
  876. def on_connect(self):
  877. """on_connect for asyncpg
  878. A major component of this for asyncpg is to set up type decoders at the
  879. asyncpg level.
  880. See https://github.com/MagicStack/asyncpg/issues/623 for
  881. notes on JSON/JSONB implementation.
  882. """
  883. super_connect = super(PGDialect_asyncpg, self).on_connect()
  884. def connect(conn):
  885. conn.await_(self.setup_asyncpg_json_codec(conn))
  886. conn.await_(self.setup_asyncpg_jsonb_codec(conn))
  887. if super_connect is not None:
  888. super_connect(conn)
  889. return connect
  890. def get_driver_connection(self, connection):
  891. return connection._connection
  892. dialect = PGDialect_asyncpg