Scan the QR code to log in and fetch cookies

This commit is contained in:
2026-03-09 16:10:29 +08:00
parent 754e720ba7
commit 8229208165
7775 changed files with 1150053 additions and 208 deletions

View File

@@ -0,0 +1,14 @@
"""Optional support for sqlalchemy.sql dynamic query generation."""
from .connection import SAConnection
from .engine import create_engine, Engine
from .exc import (Error, ArgumentError, InvalidRequestError,
NoSuchColumnError, ResourceClosedError)
# Public API of aiomysql.sa; star-imports expose exactly these names.
__all__ = ('create_engine', 'SAConnection', 'Error',
           'ArgumentError', 'InvalidRequestError', 'NoSuchColumnError',
           'ResourceClosedError', 'Engine')
# Reference each imported name once so linters don't flag them as unused.
(SAConnection, Error, ArgumentError, InvalidRequestError,
 NoSuchColumnError, ResourceClosedError, create_engine, Engine)

View File

@@ -0,0 +1,425 @@
# ported from:
# https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/connection.py
import weakref
from sqlalchemy.sql import ClauseElement
from sqlalchemy.sql.dml import UpdateBase
from sqlalchemy.sql.ddl import DDLElement
from . import exc
from .result import create_result_proxy
from .transaction import (RootTransaction, Transaction,
NestedTransaction, TwoPhaseTransaction)
from ..utils import _TransactionContextManager, _SAConnectionContextManager
def noop(k):
    """Identity pass-through, used when no bind processor is registered."""
    return k
class SAConnection:
    """SQLAlchemy-core adapter around a raw aiomysql connection.

    Instances are produced by ``Engine.acquire()``.  The object tracks
    the currently active transaction and keeps weak references to every
    ResultProxy it creates.
    """

    def __init__(self, connection, engine, compiled_cache=None):
        # Raw aiomysql connection; set to None once this wrapper closes.
        self._connection = connection
        # Currently active (root) transaction, if any.
        self._transaction = None
        # Monotonic counter used to generate unique SAVEPOINT names.
        self._savepoint_seq = 0
        # Weak refs so produced results don't keep this connection alive.
        self._weak_results = weakref.WeakSet()
        self._engine = engine
        self._dialect = engine.dialect
        # Optional dict-like cache of compiled statements.
        self._compiled_cache = compiled_cache

    def execute(self, query, *multiparams, **params):
        """Executes a SQL query with optional parameters.

        query - a SQL query string or any sqlalchemy expression.

        *multiparams/**params - represent bound parameter values to be
        used in the execution.  Typically, the format is a dictionary
        passed to *multiparams:

            await conn.execute(
                table.insert(),
                {"id": 1, "value": "v1"},
            )

        ...or individual key/values interpreted by **params::

            await conn.execute(
                table.insert(), id=1, value="v1"
            )

        In the case that a plain SQL string is passed, a tuple or
        individual values in *multiparams may be passed::

            await conn.execute(
                "INSERT INTO table (id, value) VALUES (%d, %s)",
                (1, "v1")
            )

            await conn.execute(
                "INSERT INTO table (id, value) VALUES (%s, %s)",
                1, "v1"
            )

        Returns ResultProxy instance with results of SQL query
        execution.
        """
        coro = self._execute(query, *multiparams, **params)
        # Wrapper lets callers either ``await`` the coroutine or use it
        # as an async context manager.
        return _SAConnectionContextManager(coro)

    def _base_params(self, query, dp, compiled, is_update):
        """Convert one bind-parameter structure *dp* into the dialect's
        execution format for *compiled*.

        Positional (list/tuple) parameters are only accepted for
        INSERT/UPDATE statements, where they are zipped against the
        statement's table columns.
        """
        if dp and isinstance(dp, (list, tuple)):
            if is_update:
                dp = {c.key: pval for c, pval in zip(query.table.c, dp)}
            else:
                raise exc.ArgumentError(
                    "Don't mix sqlalchemy SELECT "
                    "clause with positional "
                    "parameters"
                )
        compiled_params = compiled.construct_params(dp)
        processors = compiled._bind_processors
        # Apply per-column bind processors, falling back to identity.
        params = [{
            key: processors.get(key, noop)(compiled_params[key])
            for key in compiled_params
        }]
        post_processed_params = self._dialect.execute_sequence_format(params)
        return post_processed_params[0]

    async def _executemany(self, query, dps, cursor):
        """Execute *query* once per parameter structure in *dps* via
        ``cursor.executemany``; returns a ResultProxy.
        """
        result_map = None
        if isinstance(query, str):
            await cursor.executemany(query, dps)
        elif isinstance(query, DDLElement):
            raise exc.ArgumentError(
                "Don't mix sqlalchemy DDL clause "
                "and execution with parameters"
            )
        elif isinstance(query, ClauseElement):
            compiled = query.compile(dialect=self._dialect)
            params = []
            is_update = isinstance(query, UpdateBase)
            for dp in dps:
                params.append(
                    self._base_params(
                        query,
                        dp,
                        compiled,
                        is_update,
                    )
                )
            await cursor.executemany(str(compiled), params)
            result_map = compiled._result_columns
        else:
            raise exc.ArgumentError(
                "sql statement should be str or "
                "SQLAlchemy data "
                "selection/modification clause"
            )
        ret = await create_result_proxy(
            self,
            cursor,
            self._dialect,
            result_map
        )
        self._weak_results.add(ret)
        return ret

    async def _execute(self, query, *multiparams, **params):
        # Single-statement execution path; dispatches to _executemany
        # when more than one parameter structure was supplied.
        cursor = await self._connection.cursor()
        dp = _distill_params(multiparams, params)
        if len(dp) > 1:
            return await self._executemany(query, dp, cursor)
        elif dp:
            dp = dp[0]
        result_map = None
        if isinstance(query, str):
            await cursor.execute(query, dp or None)
        elif isinstance(query, ClauseElement):
            if self._compiled_cache is not None:
                # NOTE: the ClauseElement object itself is the cache key,
                # so the same statement object must be reused for hits.
                key = query
                compiled = self._compiled_cache.get(key)
                if not compiled:
                    compiled = query.compile(dialect=self._dialect)
                    if dp and dp.keys() == compiled.params.keys() \
                            or not (dp or compiled.params):
                        # we only want queries with bound params in cache
                        self._compiled_cache[key] = compiled
            else:
                compiled = query.compile(dialect=self._dialect)
            if not isinstance(query, DDLElement):
                post_processed_params = self._base_params(
                    query,
                    dp,
                    compiled,
                    isinstance(query, UpdateBase)
                )
                result_map = compiled._result_columns
            else:
                if dp:
                    raise exc.ArgumentError("Don't mix sqlalchemy DDL clause "
                                            "and execution with parameters")
                post_processed_params = compiled.construct_params()
                result_map = None
            await cursor.execute(str(compiled), post_processed_params)
        else:
            raise exc.ArgumentError("sql statement should be str or "
                                    "SQLAlchemy data "
                                    "selection/modification clause")
        ret = await create_result_proxy(
            self, cursor, self._dialect, result_map
        )
        self._weak_results.add(ret)
        return ret

    async def scalar(self, query, *multiparams, **params):
        """Executes a SQL query and returns a scalar value."""
        res = await self.execute(query, *multiparams, **params)
        return (await res.scalar())

    @property
    def closed(self):
        """Read-only property: True if the connection is closed."""
        return self._connection is None or self._connection.closed

    @property
    def connection(self):
        """The underlying raw aiomysql connection."""
        return self._connection

    def begin(self):
        """Begin a transaction and return a transaction handle.

        The returned object is an instance of Transaction.  This
        object represents the "scope" of the transaction, which
        completes when either the .rollback or .commit method is
        called.

        Nested calls to .begin on the same SAConnection instance will
        return new Transaction objects that represent an emulated
        transaction within the scope of the enclosing transaction,
        that is::

            trans = await conn.begin()   # outermost transaction
            trans2 = await conn.begin()  # "nested"
            await trans2.commit()        # does nothing
            await trans.commit()         # actually commits

        Calls to .commit only have an effect when invoked via the
        outermost Transaction object, though the .rollback method of
        any of the Transaction objects will roll back the transaction.

        See also:
          .begin_nested - use a SAVEPOINT
          .begin_twophase - use a two phase/XA transaction
        """
        coro = self._begin()
        return _TransactionContextManager(coro)

    async def _begin(self):
        if self._transaction is None:
            self._transaction = RootTransaction(self)
            await self._begin_impl()
            return self._transaction
        else:
            # Emulated nested transaction within the existing scope.
            return Transaction(self, self._transaction)

    async def _begin_impl(self):
        # Issue BEGIN on a throwaway cursor.
        cur = await self._connection.cursor()
        try:
            await cur.execute('BEGIN')
        finally:
            await cur.close()

    async def _commit_impl(self):
        cur = await self._connection.cursor()
        try:
            await cur.execute('COMMIT')
        finally:
            await cur.close()
            # Transaction state is cleared even if COMMIT raised.
            self._transaction = None

    async def _rollback_impl(self):
        cur = await self._connection.cursor()
        try:
            await cur.execute('ROLLBACK')
        finally:
            await cur.close()
            self._transaction = None

    async def begin_nested(self):
        """Begin a nested transaction and return a transaction handle.

        The returned object is an instance of :class:`.NestedTransaction`.

        Nested transactions require SAVEPOINT support in the
        underlying database.  Any transaction in the hierarchy may
        .commit() and .rollback(), however the outermost transaction
        still controls the overall .commit() or .rollback() of the
        transaction of a whole.
        """
        if self._transaction is None:
            self._transaction = RootTransaction(self)
            await self._begin_impl()
        else:
            self._transaction = NestedTransaction(self, self._transaction)
            self._transaction._savepoint = await self._savepoint_impl()
        return self._transaction

    async def _savepoint_impl(self, name=None):
        self._savepoint_seq += 1
        # NOTE(review): the *name* argument is ignored -- the generated
        # name always overwrites it.  Confirm callers never pass one.
        name = 'aiomysql_sa_savepoint_%s' % self._savepoint_seq
        cur = await self._connection.cursor()
        try:
            await cur.execute('SAVEPOINT ' + name)
            return name
        finally:
            await cur.close()

    async def _rollback_to_savepoint_impl(self, name, parent):
        cur = await self._connection.cursor()
        try:
            await cur.execute('ROLLBACK TO SAVEPOINT ' + name)
        finally:
            await cur.close()
        # Control returns to the enclosing transaction.
        self._transaction = parent

    async def _release_savepoint_impl(self, name, parent):
        cur = await self._connection.cursor()
        try:
            await cur.execute('RELEASE SAVEPOINT ' + name)
        finally:
            await cur.close()
        self._transaction = parent

    async def begin_twophase(self, xid=None):
        """Begin a two-phase or XA transaction and return a transaction
        handle.

        The returned object is an instance of
        TwoPhaseTransaction, which in addition to the
        methods provided by Transaction, also provides a
        TwoPhaseTransaction.prepare() method.

        xid - the two phase transaction id.  If not supplied, a
        random id will be generated.
        """
        if self._transaction is not None:
            raise exc.InvalidRequestError(
                "Cannot start a two phase transaction when a transaction "
                "is already in progress.")
        if xid is None:
            xid = self._dialect.create_xid()
        self._transaction = TwoPhaseTransaction(self, xid)
        # NOTE(review): XA START binds xid as a driver parameter while
        # the other XA statements interpolate a quoted xid -- confirm
        # both forms produce the same statement for the dialect.
        await self.execute("XA START %s", xid)
        return self._transaction

    async def _prepare_twophase_impl(self, xid):
        await self.execute("XA END '%s'" % xid)
        await self.execute("XA PREPARE '%s'" % xid)

    async def recover_twophase(self):
        """Return a list of prepared twophase transaction ids."""
        result = await self.execute("XA RECOVER;")
        return [row[0] for row in result]

    async def rollback_prepared(self, xid, *, is_prepared=True):
        """Rollback prepared twophase transaction."""
        if not is_prepared:
            await self.execute("XA END '%s'" % xid)
        await self.execute("XA ROLLBACK '%s'" % xid)

    async def commit_prepared(self, xid, *, is_prepared=True):
        """Commit prepared twophase transaction."""
        if not is_prepared:
            await self.execute("XA END '%s'" % xid)
        await self.execute("XA COMMIT '%s'" % xid)

    @property
    def in_transaction(self):
        """Return True if a transaction is in progress."""
        return self._transaction is not None and self._transaction.is_active

    async def close(self):
        """Close this SAConnection.

        This results in a release of the underlying database
        resources, that is, the underlying connection referenced
        internally.  The underlying connection is typically restored
        back to the connection-holding Pool referenced by the Engine
        that produced this SAConnection.  Any transactional state
        present on the underlying connection is also unconditionally
        released via calling Transaction.rollback() method.

        After .close() is called, the SAConnection is permanently in a
        closed state, and will allow no further operations.
        """
        if self._connection is None:
            return
        if self._transaction is not None:
            await self._transaction.rollback()
            self._transaction = None
        # don't close underlying connection, it can be reused by pool
        # conn.close()
        self._engine.release(self)
        self._connection = None
        self._engine = None

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        await self.close()
def _distill_params(multiparams, params):
"""Given arguments from the calling form *multiparams, **params,
return a list of bind parameter structures, usually a list of
dictionaries.
In the case of 'raw' execution which accepts positional parameters,
it may be a list of tuples or lists.
"""
if not multiparams:
if params:
return [params]
else:
return []
elif len(multiparams) == 1:
zero = multiparams[0]
if isinstance(zero, (list, tuple)):
if not zero or hasattr(zero[0], '__iter__') and \
not hasattr(zero[0], 'strip'):
# execute(stmt, [{}, {}, {}, ...])
# execute(stmt, [(), (), (), ...])
return zero
else:
# execute(stmt, ("value", "value"))
return [zero]
elif hasattr(zero, 'keys'):
# execute(stmt, {"key":"value"})
return [zero]
else:
# execute(stmt, "value")
return [[zero]]
else:
if (hasattr(multiparams[0], '__iter__') and
not hasattr(multiparams[0], 'strip')):
return multiparams
else:
return [multiparams]

View File

@@ -0,0 +1,235 @@
# ported from:
# https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/engine.py
import asyncio
import aiomysql
from .connection import SAConnection
from .exc import InvalidRequestError, ArgumentError
from ..utils import _PoolContextManager, _PoolAcquireContextManager
from ..cursors import (
Cursor, DeserializationCursor, DictCursor, SSCursor, SSDictCursor)
try:
from sqlalchemy.dialects.mysql.pymysql import MySQLDialect_pymysql
from sqlalchemy.dialects.mysql.mysqldb import MySQLCompiler_mysqldb
except ImportError: # pragma: no cover
raise ImportError('aiomysql.sa requires sqlalchemy')
class MySQLCompiler_pymysql(MySQLCompiler_mysqldb):
    """MySQL statement compiler that also resolves column defaults that
    must be computed client-side ("prefetched") before execution."""

    def construct_params(self, params=None, _group_number=None, _check=True):
        bound = super().construct_params(params, _group_number, _check)
        for column in self.prefetch:
            # Fill in client-side defaults for columns the server
            # won't generate itself.
            bound[column.key] = self._exec_default(column.default)
        return bound

    def _exec_default(self, default):
        # Callable defaults receive the dialect; scalar defaults are
        # returned as-is.
        return default.arg(self.dialect) if default.is_callable else default.arg
# Module-level dialect singleton shared by every engine created here,
# configured for PyMySQL-style 'pyformat' (%(name)s) parameter markers.
_dialect = MySQLDialect_pymysql(paramstyle='pyformat')
_dialect.statement_compiler = MySQLCompiler_pymysql
_dialect.default_paramstyle = 'pyformat'
def create_engine(minsize=1, maxsize=10, loop=None,
                  dialect=_dialect, pool_recycle=-1, compiled_cache=None,
                  **kwargs):
    """A coroutine for Engine creation.

    Returns Engine instance with embedded connection pool.
    The pool has *minsize* opened connections to MySQL server.
    """
    unsupported = (DeserializationCursor, DictCursor, SSCursor, SSDictCursor)
    cursorclass = kwargs.get('cursorclass', Cursor)
    bad_cursor = not issubclass(cursorclass, Cursor) or any(
        issubclass(cursorclass, klass) for klass in unsupported)
    if bad_cursor:
        raise ArgumentError('SQLAlchemy engine does not support '
                            'this cursor class')
    coro = _create_engine(minsize=minsize, maxsize=maxsize, loop=loop,
                          dialect=dialect, pool_recycle=pool_recycle,
                          compiled_cache=compiled_cache, **kwargs)
    return _EngineContextManager(coro)
async def _create_engine(minsize=1, maxsize=10, loop=None,
                         dialect=_dialect, pool_recycle=-1,
                         compiled_cache=None, **kwargs):
    """Create the pool backing an Engine and hand it to the Engine."""
    if loop is None:
        loop = asyncio.get_event_loop()
    pool = await aiomysql.create_pool(minsize=minsize, maxsize=maxsize,
                                      loop=loop,
                                      pool_recycle=pool_recycle, **kwargs)
    # Grab one connection up front so connectivity problems surface
    # here rather than at first use, then put it straight back.
    probe = await pool.acquire()
    try:
        return Engine(dialect, pool, compiled_cache=compiled_cache, **kwargs)
    finally:
        pool.release(probe)
class Engine:
    """Connects a aiomysql.Pool and
    sqlalchemy.engine.interfaces.Dialect together to provide a
    source of database connectivity and behavior.

    An Engine object is instantiated publicly using the
    create_engine coroutine.
    """

    def __init__(self, dialect, pool, compiled_cache=None, **kwargs):
        self._dialect = dialect
        self._pool = pool
        # Optional dict-like cache of compiled statements, shared by all
        # SAConnection objects this engine hands out.
        self._compiled_cache = compiled_cache
        # Remaining connection kwargs, kept for introspection.
        self._conn_kw = kwargs

    @property
    def dialect(self):
        """The SQLAlchemy dialect used by this engine."""
        return self._dialect

    @property
    def name(self):
        """A name of the dialect."""
        return self._dialect.name

    @property
    def driver(self):
        """A driver of the dialect."""
        return self._dialect.driver

    @property
    def minsize(self):
        # Minimum number of pooled connections.
        return self._pool.minsize

    @property
    def maxsize(self):
        # Maximum number of pooled connections.
        return self._pool.maxsize

    @property
    def size(self):
        # Current number of connections managed by the pool.
        return self._pool.size

    @property
    def freesize(self):
        # Number of idle connections available in the pool.
        return self._pool.freesize

    def close(self):
        """Close engine.

        Mark all engine connections to be closed on getting back to pool.
        Closed engine doesn't allow to acquire new connections.
        """
        self._pool.close()

    def terminate(self):
        """Terminate engine.

        Terminate engine pool with instantly closing all acquired
        connections also.
        """
        self._pool.terminate()

    async def wait_closed(self):
        """Wait for closing all engine's connections."""
        await self._pool.wait_closed()

    def acquire(self):
        """Get a connection from pool."""
        coro = self._acquire()
        return _EngineAcquireContextManager(coro, self)

    async def _acquire(self):
        # Wrap the raw pooled connection in an SAConnection.
        raw = await self._pool.acquire()
        conn = SAConnection(raw, self, compiled_cache=self._compiled_cache)
        return conn

    def release(self, conn):
        """Revert back connection to pool."""
        if conn.in_transaction:
            raise InvalidRequestError("Cannot release a connection with "
                                      "not finished transaction")
        raw = conn.connection
        return self._pool.release(raw)

    def __enter__(self):
        raise RuntimeError(
            '"yield from" should be used as context manager expression')

    def __exit__(self, *args):
        # This must exist because __enter__ exists, even though that
        # always raises; that's how the with-statement works.
        pass  # pragma: nocover

    def __iter__(self):
        # Legacy pre-async/await idiom.  This is not a coroutine.  It is
        # meant to enable:
        #
        #     with (yield from engine) as conn:
        #         <block>
        #
        # as an alternative to:
        #
        #     conn = yield from engine.acquire()
        #     try:
        #         <block>
        #     finally:
        #         engine.release(conn)
        conn = yield from self.acquire()
        return _ConnectionContextManager(self, conn)

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        # Close the pool and wait for all connections to be returned.
        self.close()
        await self.wait_closed()
# Engine reuses the generic pool context managers from aiomysql.utils.
_EngineContextManager = _PoolContextManager
_EngineAcquireContextManager = _PoolAcquireContextManager
class _ConnectionContextManager:
"""Context manager.
This enables the following idiom for acquiring and releasing a
connection around a block:
with (yield from engine) as conn:
cur = yield from conn.cursor()
while failing loudly when accidentally using:
with engine:
<block>
"""
__slots__ = ('_engine', '_conn')
def __init__(self, engine, conn):
self._engine = engine
self._conn = conn
def __enter__(self):
assert self._conn is not None
return self._conn
def __exit__(self, *args):
try:
self._engine.release(self._conn)
finally:
self._engine = None
self._conn = None

View File

@@ -0,0 +1,28 @@
# ported from: https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/exc.py
class Error(Exception):
    """Generic error class; base of all aiomysql.sa exceptions."""
class ArgumentError(Error):
    """Raised when an invalid or conflicting function argument is supplied.

    This error generally corresponds to construction time state errors.
    """
class InvalidRequestError(ArgumentError):
    """aiomysql.sa was asked to do something it can't do.

    This error generally corresponds to runtime state errors.
    """
class NoSuchColumnError(KeyError, InvalidRequestError):
    """A nonexistent column is requested from a ``RowProxy``.

    Inherits ``KeyError`` so mapping-style lookups can catch it too.
    """
class ResourceClosedError(InvalidRequestError):
    """An operation was requested from a connection, cursor, or other
    object that's in a closed state."""

View File

@@ -0,0 +1,458 @@
# ported from:
# https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/result.py
import weakref
from collections.abc import Mapping, Sequence
from sqlalchemy.sql import expression, sqltypes
from . import exc
async def create_result_proxy(connection, cursor, dialect, result_map):
    """Instantiate a ResultProxy and run its async initialisation step."""
    proxy = ResultProxy(connection, cursor, dialect, result_map)
    await proxy._prepare()
    return proxy
class RowProxy(Mapping):
    """A single result row with mapping-style access by column name.

    Constructed by ResultProxy objects; also supports attribute access
    and equality comparison against plain sequences.
    """

    __slots__ = ('_result_proxy', '_row', '_processors', '_keymap')

    def __init__(self, result_proxy, row, processors, keymap):
        """RowProxy objects are constructed by ResultProxy objects."""
        self._result_proxy = result_proxy
        self._row = row
        self._processors = processors
        self._keymap = keymap

    def __iter__(self):
        return iter(self._result_proxy.keys)

    def __len__(self):
        return len(self._row)

    def __getitem__(self, key):
        try:
            processor, obj, index = self._keymap[key]
        except KeyError:
            # Unknown key: let the metadata resolve (or raise).
            processor, obj, index = self._result_proxy._key_fallback(key)
        if index is None:
            # A name collision was recorded for this key.
            raise exc.InvalidRequestError(
                "Ambiguous column name '%s' in result set! "
                "try 'use_labels' option on select statement." % key)
        raw = self._row[index]
        return raw if processor is None else processor(raw)

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError as e:
            raise AttributeError(e.args[0])

    def __contains__(self, key):
        return self._result_proxy._has_key(self._row, key)

    # Rows are mutable views over driver data; keep them unhashable.
    __hash__ = None

    def __eq__(self, other):
        if isinstance(other, RowProxy):
            return self.as_tuple() == other.as_tuple()
        if isinstance(other, Sequence):
            return self.as_tuple() == other
        return NotImplemented

    def __ne__(self, other):
        return not self == other

    def as_tuple(self):
        """Return the processed column values as a plain tuple."""
        return tuple(self[key] for key in self)

    def __repr__(self):
        return repr(self.as_tuple())
class ResultMetaData:
    """Handle cursor.description, applying additional info from an execution
    context."""

    def __init__(self, result_proxy, metadata):
        # *metadata* is the DBAPI cursor.description sequence.
        self._processors = processors = []
        result_map = {}
        if result_proxy._result_map:
            # Map column name -> type object from the compiled statement.
            result_map = {elem[0]: elem[3] for elem in
                          result_proxy._result_map}
        # We do not strictly need to store the processor in the key mapping,
        # though it is faster in the Python version (probably because of the
        # saved attribute lookup self._processors)
        self._keymap = keymap = {}
        self.keys = []
        dialect = result_proxy.dialect
        # `dbapi_type_map` property removed in SQLAlchemy 1.2+.
        # Usage of `getattr` only needed for backward compatibility with
        # older versions of SQLAlchemy.
        typemap = getattr(dialect, 'dbapi_type_map', {})
        assert dialect.case_sensitive, \
            "Doesn't support case insensitive database connection"
        # high precedence key values.
        primary_keymap = {}
        assert not dialect.description_encoding, \
            "psycopg in py3k should not use this"
        for i, rec in enumerate(metadata):
            colname = rec[0]
            coltype = rec[1]
            # PostgreSQL doesn't require this.
            # if dialect.requires_name_normalize:
            #     colname = dialect.normalize_name(colname)
            name, obj, type_ = (
                colname,
                None,
                result_map.get(
                    colname,
                    typemap.get(coltype, sqltypes.NULLTYPE))
            )
            # Processor converts a raw DBAPI value to its Python value.
            processor = type_._cached_result_processor(dialect, coltype)
            processors.append(processor)
            rec = (processor, obj, i)
            # indexes as keys. This is only needed for the Python version of
            # RowProxy (the C version uses a faster path for integer indexes).
            primary_keymap[i] = rec
            # populate primary keymap, looking for conflicts.
            if primary_keymap.setdefault(name, rec) is not rec:
                # place a record that doesn't have the "index" - this
                # is interpreted later as an AmbiguousColumnError,
                # but only when actually accessed. Columns
                # colliding by name is not a problem if those names
                # aren't used; integer access is always
                # unambiguous.
                primary_keymap[name] = rec = (None, obj, None)
            self.keys.append(colname)
            if obj:
                for o in obj:
                    keymap[o] = rec
                    # technically we should be doing this but we
                    # are saving on callcounts by not doing so.
                    # if keymap.setdefault(o, rec) is not rec:
                    #     keymap[o] = (None, obj, None)
        # overwrite keymap values with those of the
        # high precedence keymap.
        keymap.update(primary_keymap)

    def _key_fallback(self, key, raiseerr=True):
        """Resolve *key* (string or ColumnElement) not present in the
        keymap; raise NoSuchColumnError when *raiseerr* is true."""
        map = self._keymap
        result = None
        if isinstance(key, str):
            result = map.get(key)
        # fallback for targeting a ColumnElement to a textual expression
        # this is a rare use case which only occurs when matching text()
        # or colummn('name') constructs to ColumnElements, or after a
        # pickle/unpickle roundtrip
        elif isinstance(key, expression.ColumnElement):
            if (key._label and key._label in map):
                result = map[key._label]
            elif (hasattr(key, 'name') and key.name in map):
                # match is only on name.
                result = map[key.name]
            # search extra hard to make sure this
            # isn't a column/label name overlap.
            # this check isn't currently available if the row
            # was unpickled.
            if (result is not None and
                    result[1] is not None):
                for obj in result[1]:
                    if key._compare_name_for_result(obj):
                        break
                else:
                    result = None
        if result is None:
            if raiseerr:
                raise exc.NoSuchColumnError(
                    "Could not locate column in row for column '%s'" %
                    expression._string_or_unprintable(key))
            else:
                return None
        else:
            # Cache the resolution for subsequent lookups.
            map[key] = result
            return result

    def _has_key(self, row, key):
        # True when *key* resolves to a column, without raising.
        if key in self._keymap:
            return True
        else:
            return self._key_fallback(key, False) is not None
class ResultProxy:
    """Wraps a DB-API cursor object to provide easier access to row columns.

    Individual columns may be accessed by their integer position,
    case-insensitive column name, or by sqlalchemy schema.Column
    object. e.g.:

        row = fetchone()
        col1 = row[0]                 # access via integer position
        col2 = row['col2']            # access via name
        col3 = row[mytable.c.mycol]   # access via Column object.

    ResultProxy also handles post-processing of result column
    data using sqlalchemy TypeEngine objects, which are referenced from
    the originating SQL statement that produced this result set.
    """

    def __init__(self, connection, cursor, dialect, result_map):
        self._dialect = dialect
        self._closed = False
        self._cursor = cursor
        self._connection = connection
        # Snapshot rowcount/lastrowid now; the cursor may be closed later.
        self._rowcount = cursor.rowcount
        self._lastrowid = cursor.lastrowid
        self._result_map = result_map

    async def _prepare(self):
        """Second (async) initialisation step, run by create_result_proxy."""
        loop = self._connection.connection.loop
        cursor = self._cursor
        if cursor.description is not None:
            self._metadata = ResultMetaData(self, cursor.description)

            def callback(wr):
                # Close the cursor when this proxy is garbage-collected
                # without being closed explicitly.
                loop.create_task(cursor.close())
            self._weak = weakref.ref(self, callback)
        else:
            # Statement returns no rows (e.g. DML/DDL): release now.
            self._metadata = None
            await self.close()
            self._weak = None

    @property
    def dialect(self):
        """SQLAlchemy dialect."""
        return self._dialect

    @property
    def cursor(self):
        # The underlying DBAPI cursor (None after close()).
        return self._cursor

    def keys(self):
        """Return the current set of string keys for rows."""
        if self._metadata:
            return tuple(self._metadata.keys)
        else:
            return ()

    @property
    def rowcount(self):
        """Return the 'rowcount' for this result.

        The 'rowcount' reports the number of rows *matched*
        by the WHERE criterion of an UPDATE or DELETE statement.

        .. note::

            Notes regarding .rowcount:

            * This attribute returns the number of rows *matched*,
              which is not necessarily the same as the number of rows
              that were actually *modified* - an UPDATE statement, for example,
              may have no net change on a given row if the SET values
              given are the same as those present in the row already.
              Such a row would be matched but not modified.

            * .rowcount is *only* useful in conjunction
              with an UPDATE or DELETE statement.  Contrary to what the Python
              DBAPI says, it does *not* return the
              number of rows available from the results of a SELECT statement
              as DBAPIs cannot support this functionality when rows are
              unbuffered.

            * Statements that use RETURNING may not return a correct
              rowcount.
        """
        return self._rowcount

    @property
    def lastrowid(self):
        """Returns the 'lastrowid' accessor on the DBAPI cursor.

        This is a DBAPI specific method and is only functional
        for those backends which support it, for statements
        where it is appropriate.
        """
        return self._lastrowid

    @property
    def returns_rows(self):
        """True if this ResultProxy returns rows.

        I.e. if it is legal to call the methods .fetchone(),
        .fetchmany() and .fetchall().
        """
        return self._metadata is not None

    @property
    def closed(self):
        # True once close() has run (explicitly or automatically).
        return self._closed

    async def close(self):
        """Close this ResultProxy.

        Closes the underlying DBAPI cursor corresponding to the execution.

        Note that any data cached within this ResultProxy is still available.
        For some types of results, this may include buffered rows.

        If this ResultProxy was generated from an implicit execution,
        the underlying Connection will also be closed (returns the
        underlying DBAPI connection to the connection pool.)

        This method is called automatically when:

        * all result rows are exhausted using the fetchXXX() methods.
        * cursor.description is None.
        """
        if not self._closed:
            self._closed = True
            await self._cursor.close()
            # allow consistent errors
            self._cursor = None
            # Drop the weakref finalizer; the cursor is already closed.
            self._weak = None

    # def __iter__(self):
    #     while True:
    #         row = yield from self.fetchone()
    #         if row is None:
    #             raise StopIteration
    #         else:
    #             yield row

    def _non_result(self):
        # Raise the appropriate "no rows available" error.
        if self._metadata is None:
            raise exc.ResourceClosedError(
                "This result object does not return rows. "
                "It has been closed automatically.")
        else:
            raise exc.ResourceClosedError("This result object is closed.")

    def _process_rows(self, rows):
        # Wrap raw driver rows in RowProxy objects sharing this result's
        # processors and keymap.
        process_row = RowProxy
        metadata = self._metadata
        keymap = metadata._keymap
        processors = metadata._processors
        return [process_row(metadata, row, processors, keymap)
                for row in rows]

    async def fetchall(self):
        """Fetch all rows, just like DB-API cursor.fetchall()."""
        try:
            rows = await self._cursor.fetchall()
        except AttributeError:
            # self._cursor is None after close().
            self._non_result()
        else:
            ret = self._process_rows(rows)
            await self.close()
            return ret

    async def fetchone(self):
        """Fetch one row, just like DB-API cursor.fetchone().

        If a row is present, the cursor remains open after this is called.
        Else the cursor is automatically closed and None is returned.
        """
        try:
            row = await self._cursor.fetchone()
        except AttributeError:
            self._non_result()
        else:
            if row is not None:
                return self._process_rows([row])[0]
            else:
                await self.close()
                return None

    async def fetchmany(self, size=None):
        """Fetch many rows, just like DB-API
        cursor.fetchmany(size=cursor.arraysize).

        If rows are present, the cursor remains open after this is called.
        Else the cursor is automatically closed and an empty list is returned.
        """
        try:
            if size is None:
                rows = await self._cursor.fetchmany()
            else:
                rows = await self._cursor.fetchmany(size)
        except AttributeError:
            self._non_result()
        else:
            ret = self._process_rows(rows)
            if len(ret) == 0:
                await self.close()
            return ret

    async def first(self):
        """Fetch the first row and then close the result set unconditionally.

        Returns None if no row is present.
        """
        if self._metadata is None:
            self._non_result()
        try:
            return (await self.fetchone())
        finally:
            await self.close()

    async def scalar(self):
        """Fetch the first column of the first row, and close the result set.

        Returns None if no row is present.
        """
        row = await self.first()
        if row is not None:
            return row[0]
        else:
            return None

    def __aiter__(self):
        return self

    async def __anext__(self):
        data = await self.fetchone()
        if data is not None:
            return data
        else:
            raise StopAsyncIteration  # noqa

View File

@@ -0,0 +1,169 @@
# ported from:
# https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/transaction.py
from . import exc
class Transaction:
    """Represent a database transaction in progress.

    Procured by calling the SAConnection.begin() method of
    SAConnection::

        with (yield from engine) as conn:
            trans = yield from conn.begin()
            try:
                yield from conn.execute("insert into x (a, b) values (1, 2)")
            except Exception:
                yield from trans.rollback()
            else:
                yield from trans.commit()

    Provides .rollback() and .commit() methods to control transaction
    boundaries.

    See also: SAConnection.begin(), SAConnection.begin_twophase(),
    SAConnection.begin_nested().
    """

    def __init__(self, connection, parent):
        self._connection = connection
        # A root transaction is its own parent.
        self._parent = parent or self
        self._is_active = True

    @property
    def is_active(self):
        """Return ``True`` if a transaction is active."""
        return self._is_active

    @property
    def connection(self):
        """Return transaction's connection (SAConnection instance)."""
        return self._connection

    async def close(self):
        """Close this transaction.

        Rolls back when this is the base transaction of a begin/commit
        nesting; otherwise just marks this level inactive.  Useful for
        cancelling a Transaction without affecting the scope of an
        enclosing transaction.
        """
        if not self._parent._is_active:
            return
        if self._parent is self:
            await self.rollback()
        else:
            self._is_active = False

    async def rollback(self):
        """Roll back this transaction."""
        if self._parent._is_active:
            await self._do_rollback()
            self._is_active = False

    async def _do_rollback(self):
        # Plain nested transactions delegate rollback to their parent.
        await self._parent.rollback()

    async def commit(self):
        """Commit this transaction."""
        if not self._parent._is_active:
            raise exc.InvalidRequestError("This transaction is inactive")
        await self._do_commit()
        self._is_active = False

    async def _do_commit(self):
        # Overridden by subclasses; an emulated nested commit is a no-op.
        pass

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        if exc_type:
            await self.rollback()
        elif self._is_active:
            await self.commit()
class RootTransaction(Transaction):
    """Outermost transaction; delegates the real COMMIT/ROLLBACK to the
    owning connection."""

    def __init__(self, connection):
        super().__init__(connection, None)

    async def _do_rollback(self):
        await self._connection._rollback_impl()

    async def _do_commit(self):
        await self._connection._commit_impl()
class NestedTransaction(Transaction):
    """Represent a 'nested', or SAVEPOINT transaction.

    A new NestedTransaction object may be procured using the
    SAConnection.begin_nested() method.  The interface is the same as
    that of the Transaction class.
    """

    # Assigned by SAConnection.begin_nested() after the SAVEPOINT is set.
    _savepoint = None

    def __init__(self, connection, parent):
        super().__init__(connection, parent)

    def _check_savepoint(self):
        # Guard against use before begin_nested() assigned the savepoint.
        assert self._savepoint is not None, "Broken transaction logic"

    async def _do_rollback(self):
        self._check_savepoint()
        if self._is_active:
            await self._connection._rollback_to_savepoint_impl(
                self._savepoint, self._parent)

    async def _do_commit(self):
        self._check_savepoint()
        if self._is_active:
            await self._connection._release_savepoint_impl(
                self._savepoint, self._parent)
class TwoPhaseTransaction(Transaction):
    """Represent a two-phase (XA) transaction.

    A new TwoPhaseTransaction object may be procured using the
    SAConnection.begin_twophase() method.  Same interface as
    Transaction, plus the .prepare() method.
    """

    def __init__(self, connection, xid):
        super().__init__(connection, None)
        self._is_prepared = False
        self._xid = xid

    @property
    def xid(self):
        """Returns twophase transaction id."""
        return self._xid

    async def prepare(self):
        """Prepare this TwoPhaseTransaction.

        After a PREPARE, the transaction can be committed.
        """
        if not self._parent.is_active:
            raise exc.InvalidRequestError("This transaction is inactive")
        await self._connection._prepare_twophase_impl(self._xid)
        self._is_prepared = True

    async def _do_rollback(self):
        await self._connection.rollback_prepared(
            self._xid, is_prepared=self._is_prepared)

    async def _do_commit(self):
        await self._connection.commit_prepared(
            self._xid, is_prepared=self._is_prepared)