lol
@@ -269,7 +269,7 @@ from .types import Uuid as Uuid
 from .types import VARBINARY as VARBINARY
 from .types import VARCHAR as VARCHAR
 
-__version__ = "2.0.20"
+__version__ = "2.0.23"
 
 
 def __go(lcls: Any) -> None:
@@ -227,7 +227,7 @@ class PyODBCConnector(Connector):
     def get_isolation_level_values(
         self, dbapi_connection: interfaces.DBAPIConnection
     ) -> List[IsolationLevel]:
-        return super().get_isolation_level_values(dbapi_connection) + [ # type: ignore # noqa: E501
+        return super().get_isolation_level_values(dbapi_connection) + [
            "AUTOCOMMIT"
        ]
 
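The following usage sketch is editorial and not part of the commit: the "AUTOCOMMIT" entry added above is what lets the pyodbc-based dialects accept that value as an isolation level. The DSN in the URL is a placeholder.

from sqlalchemy import create_engine, text

# "mydsn" is a hypothetical ODBC data source name; isolation_level selects the
# value that get_isolation_level_values() now advertises.
engine = create_engine(
    "mssql+pyodbc://scott:tiger@mydsn",
    isolation_level="AUTOCOMMIT",
)

with engine.connect() as conn:
    conn.execute(text("SELECT 1"))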
@@ -51,7 +51,7 @@ def _auto_fn(name: str) -> Optional[Callable[[], Type[Dialect]]]:
 
     if hasattr(module, driver):
         module = getattr(module, driver)
-        return lambda: module.dialect # type: ignore
+        return lambda: module.dialect
     else:
         return None
 
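For orientation (editorial note, not in the diff): _auto_fn() is only the loader for the bundled dialect names; an external dialect is resolved through the registry instead. A minimal sketch with made-up module and class names:

from sqlalchemy.dialects import registry

# "mypackage.mydialect" and "MyDialect" are placeholders for a real
# third-party dialect module and class.
registry.register("mydialect.mydriver", "mypackage.mydialect", "MyDialect")

# from here on, create_engine("mydialect+mydriver://...") can locate the
# dialect class without going through _auto_fn().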
@@ -6,6 +6,7 @@
 # the MIT License: https://www.opensource.org/licenses/mit-license.php
 # mypy: ignore-errors
 
+from . import aioodbc  # noqa
 from . import base  # noqa
 from . import pymssql  # noqa
 from . import pyodbc  # noqa
@@ -1537,28 +1537,22 @@ class MSUUid(sqltypes.Uuid):
         if self.native_uuid:
 
             def process(value):
-                if value is not None:
-                    value = f"""'{str(value).replace("''", "'")}'"""
-                return value
+                return f"""'{str(value).replace("''", "'")}'"""
 
             return process
         else:
             if self.as_uuid:
 
                 def process(value):
-                    if value is not None:
-                        value = f"""'{value.hex}'"""
-                    return value
+                    return f"""'{value.hex}'"""
 
                 return process
             else:
 
                 def process(value):
-                    if value is not None:
-                        value = f"""'{
+                    return f"""'{
                         value.replace("-", "").replace("'", "''")
                     }'"""
-                    return value
 
                 return process
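Editorial illustration, not part of the diff: the literal processors above run when a Uuid value has to be inlined into the SQL text, for example when compiling with literal_binds. Table and column names below are invented, and the exact quoting produced depends on the native_uuid / as_uuid settings in effect.

import uuid

from sqlalchemy import Column, MetaData, Table, Uuid, select
from sqlalchemy.dialects import mssql

t = Table("data", MetaData(), Column("id", Uuid))
stmt = select(t).where(t.c.id == uuid.uuid4())

# literal_binds routes the UUID value through MSUUid's literal processor
print(stmt.compile(dialect=mssql.dialect(), compile_kwargs={"literal_binds": True}))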
@@ -1942,6 +1936,7 @@ class MSExecutionContext(default.DefaultExecutionContext):
             row = self.cursor.fetchall()[0]
             self._lastrowid = int(row[0])
 
+            self.cursor_fetch_strategy = _cursor._NO_CURSOR_DML
         elif (
             self.compiled is not None
             and is_sql_compiler(self.compiled)
@@ -2057,6 +2052,12 @@ class MSSQLCompiler(compiler.SQLCompiler):
     def visit_char_length_func(self, fn, **kw):
         return "LEN%s" % self.function_argspec(fn, **kw)
 
+    def visit_aggregate_strings_func(self, fn, **kw):
+        expr = fn.clauses.clauses[0]._compiler_dispatch(self, **kw)
+        kw["literal_execute"] = True
+        delimeter = fn.clauses.clauses[1]._compiler_dispatch(self, **kw)
+        return f"string_agg({expr}, {delimeter})"
+
     def visit_concat_op_expression_clauselist(
         self, clauselist, operator, **kw
     ):
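Editorial sketch, not part of the diff: visit_aggregate_strings_func() is the per-dialect hook behind the generic aggregate_strings() SQL function; this commit renders it as string_agg() here, as group_concat(... SEPARATOR ...) on MySQL, as LISTAGG() on Oracle and as string_agg() on PostgreSQL in the hunks further down. Table and column names are invented.

from sqlalchemy import column, func, select, table
from sqlalchemy.dialects import mssql, mysql, oracle, postgresql

users = table("users", column("name"))
stmt = select(func.aggregate_strings(users.c.name, ", "))

for dialect in (mssql.dialect(), mysql.dialect(), oracle.dialect(), postgresql.dialect()):
    # each dialect's visit_aggregate_strings_func() chooses the SQL function;
    # on SQL Server the delimiter is inlined at execution time because of
    # literal_execute=True
    print(stmt.compile(dialect=dialect))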
@@ -2119,6 +2120,7 @@ class MSSQLCompiler(compiler.SQLCompiler):
             or (
                 # limit can use TOP with is by itself. fetch only uses TOP
                 # when it needs to because of PERCENT and/or WITH TIES
+                # TODO: Why? shouldn't we use TOP always ?
                 select._simple_int_clause(select._fetch_clause)
                 and (
                     select._fetch_clause_options["percent"]
@@ -2379,10 +2381,13 @@ class MSSQLCompiler(compiler.SQLCompiler):
         return ""
 
     def order_by_clause(self, select, **kw):
-        # MSSQL only allows ORDER BY in subqueries if there is a LIMIT
+        # MSSQL only allows ORDER BY in subqueries if there is a LIMIT:
+        # "The ORDER BY clause is invalid in views, inline functions,
+        # derived tables, subqueries, and common table expressions,
+        # unless TOP, OFFSET or FOR XML is also specified."
        if (
            self.is_subquery()
-            and not select._limit
+            and not self._use_top(select)
            and (
                select._offset is None
                or not self.dialect._supports_offset_fetch
@@ -211,7 +211,7 @@ class NumericSqlVariant(TypeDecorator):
     cache_ok = True
 
     def column_expression(self, colexpr):
-        return cast(colexpr, Numeric)
+        return cast(colexpr, Numeric(38, 0))
 
 
 identity_columns = Table(
@@ -26,7 +26,7 @@ def generate_driver_url(url, driver, query_str):
 
     new_url = url.set(drivername="%s+%s" % (backend, driver))
 
-    if driver != "pyodbc":
+    if driver not in ("pyodbc", "aioodbc"):
         new_url = new_url.set(query="")
 
     if query_str:
@@ -365,6 +365,7 @@ from ... import exc
 from ... import types as sqltypes
 from ... import util
 from ...connectors.pyodbc import PyODBCConnector
+from ...engine import cursor as _cursor
 
 
 class _ms_numeric_pyodbc:
@@ -585,14 +586,22 @@ class MSExecutionContext_pyodbc(MSExecutionContext):
                 try:
                     # fetchall() ensures the cursor is consumed
                     # without closing it (FreeTDS particularly)
-                    row = self.cursor.fetchall()[0]
-                    break
+                    rows = self.cursor.fetchall()
                 except self.dialect.dbapi.Error:
                     # no way around this - nextset() consumes the previous set
                     # so we need to just keep flipping
                     self.cursor.nextset()
+                else:
+                    if not rows:
+                        # async adapter drivers just return None here
+                        self.cursor.nextset()
+                        continue
+
+                    row = rows[0]
+                    break
 
             self._lastrowid = int(row[0])
 
+            self.cursor_fetch_strategy = _cursor._NO_CURSOR_DML
         else:
             super().post_exec()
@@ -37,6 +37,8 @@ from ...util.concurrency import await_only
 
 
 class AsyncAdapt_aiomysql_cursor:
+    # TODO: base on connectors/asyncio.py
+    # see #10415
     server_side = False
     __slots__ = (
         "_adapt_connection",
@@ -139,6 +141,8 @@ class AsyncAdapt_aiomysql_cursor:
 
 
 class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor):
+    # TODO: base on connectors/asyncio.py
+    # see #10415
     __slots__ = ()
     server_side = True
 
@@ -167,6 +171,8 @@ class AsyncAdapt_aiomysql_ss_cursor(AsyncAdapt_aiomysql_cursor):
 
 
 class AsyncAdapt_aiomysql_connection(AdaptedConnection):
+    # TODO: base on connectors/asyncio.py
+    # see #10415
     await_ = staticmethod(await_only)
     __slots__ = ("dbapi", "_execute_mutex")
 
@@ -202,6 +208,8 @@ class AsyncAdapt_aiomysql_connection(AdaptedConnection):
 
 
 class AsyncAdaptFallback_aiomysql_connection(AsyncAdapt_aiomysql_connection):
+    # TODO: base on connectors/asyncio.py
+    # see #10415
     __slots__ = ()
 
     await_ = staticmethod(await_fallback)
@@ -37,6 +37,8 @@ from ...util.concurrency import await_only
 
 
 class AsyncAdapt_asyncmy_cursor:
+    # TODO: base on connectors/asyncio.py
+    # see #10415
     server_side = False
     __slots__ = (
         "_adapt_connection",
@@ -141,6 +143,8 @@ class AsyncAdapt_asyncmy_cursor:
 
 
 class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor):
+    # TODO: base on connectors/asyncio.py
+    # see #10415
     __slots__ = ()
     server_side = True
 
@@ -171,6 +175,8 @@ class AsyncAdapt_asyncmy_ss_cursor(AsyncAdapt_asyncmy_cursor):
 
 
 class AsyncAdapt_asyncmy_connection(AdaptedConnection):
+    # TODO: base on connectors/asyncio.py
+    # see #10415
     await_ = staticmethod(await_only)
     __slots__ = ("dbapi", "_execute_mutex")
 
@@ -999,14 +999,14 @@ output::
     )
 
 """ # noqa
 from __future__ import annotations
 
 from array import array as _array
 from collections import defaultdict
+from itertools import compress
 import re
+from typing import cast
 
-from sqlalchemy import literal_column
-from sqlalchemy.sql import visitors
 from . import reflection as _reflection
 from .enumerated import ENUM
 from .enumerated import SET
@@ -1047,10 +1047,12 @@ from .types import TINYTEXT
 from .types import VARCHAR
 from .types import YEAR
 from ... import exc
+from ... import literal_column
 from ... import log
 from ... import schema as sa_schema
 from ... import sql
 from ... import util
+from ...engine import cursor as _cursor
 from ...engine import default
 from ...engine import reflection
 from ...engine.reflection import ReflectionDefaults
@@ -1062,7 +1064,10 @@ from ...sql import operators
 from ...sql import roles
 from ...sql import sqltypes
 from ...sql import util as sql_util
+from ...sql import visitors
 from ...sql.compiler import InsertmanyvaluesSentinelOpts
+from ...sql.compiler import SQLCompiler
+from ...sql.schema import SchemaConst
 from ...types import BINARY
 from ...types import BLOB
 from ...types import BOOLEAN
@@ -1071,6 +1076,7 @@ from ...types import UUID
 from ...types import VARBINARY
+from ...util import topological
 
 
 SET_RE = re.compile(
     r"\s*SET\s+(?:(?:GLOBAL|SESSION)\s+)?\w", re.I | re.UNICODE
 )
@@ -1164,6 +1170,32 @@ ischema_names = {
 
 
 class MySQLExecutionContext(default.DefaultExecutionContext):
+    def post_exec(self):
+        if (
+            self.isdelete
+            and cast(SQLCompiler, self.compiled).effective_returning
+            and not self.cursor.description
+        ):
+            # All MySQL/mariadb drivers appear to not include
+            # cursor.description for DELETE..RETURNING with no rows if the
+            # WHERE criteria is a straight "false" condition such as our EMPTY
+            # IN condition. manufacture an empty result in this case (issue
+            # #10505)
+            #
+            # taken from cx_Oracle implementation
+            self.cursor_fetch_strategy = (
+                _cursor.FullyBufferedCursorFetchStrategy(
+                    self.cursor,
+                    [
+                        (entry.keyname, None)
+                        for entry in cast(
+                            SQLCompiler, self.compiled
+                        )._result_columns
+                    ],
+                    [],
+                )
+            )
+
     def create_server_side_cursor(self):
         if self.dialect.supports_server_side_cursors:
             return self._dbapi_connection.cursor(self.dialect._sscursor)
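Editorial sketch of the scenario issue #10505 covers (not part of the diff): a DELETE..RETURNING whose WHERE clause collapses to the "empty IN" condition returns no cursor.description on the MySQL/MariaDB drivers, so the post_exec() hook above manufactures an empty result instead of failing. Table and column names are invented.

from sqlalchemy import Column, Integer, MetaData, Table, delete

t = Table("t", MetaData(), Column("id", Integer, primary_key=True))

# the empty IN produces a statement that can match no rows; executing it on
# MySQL/MariaDB yields no cursor.description even though RETURNING is present
stmt = delete(t).where(t.c.id.in_([])).returning(t.c.id)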
@@ -1208,6 +1240,12 @@ class MySQLCompiler(compiler.SQLCompiler):
         )
         return f"{clause} WITH ROLLUP"
 
+    def visit_aggregate_strings_func(self, fn, **kw):
+        expr, delimeter = (
+            elem._compiler_dispatch(self, **kw) for elem in fn.clauses
+        )
+        return f"group_concat({expr} SEPARATOR {delimeter})"
+
     def visit_sequence(self, seq, **kw):
         return "nextval(%s)" % self.preparer.format_sequence(seq)
 
@@ -1760,7 +1798,12 @@ class MySQLCompiler(compiler.SQLCompiler):
 class MySQLDDLCompiler(compiler.DDLCompiler):
     def get_column_specification(self, column, **kw):
         """Builds column DDL."""
-
+        if (
+            self.dialect.is_mariadb is True
+            and column.computed is not None
+            and column._user_defined_nullable is SchemaConst.NULL_UNSPECIFIED
+        ):
+            column.nullable = True
         colspec = [
             self.preparer.format_column(column),
             self.dialect.type_compiler_instance.process(
@@ -30,16 +30,46 @@ be ``mysqldb``. ``mariadb+mariadbconnector://`` is required to use this driver.
 
 """ # noqa
 import re
+from uuid import UUID as _python_UUID
 
 from .base import MySQLCompiler
 from .base import MySQLDialect
 from .base import MySQLExecutionContext
 from ... import sql
 from ... import util
+from ...sql import sqltypes
 
 
 mariadb_cpy_minimum_version = (1, 0, 1)
 
 
+class _MariaDBUUID(sqltypes.UUID[sqltypes._UUID_RETURN]):
+    # work around JIRA issue
+    # https://jira.mariadb.org/browse/CONPY-270. When that issue is fixed,
+    # this type can be removed.
+    def result_processor(self, dialect, coltype):
+        if self.as_uuid:
+
+            def process(value):
+                if value is not None:
+                    if hasattr(value, "decode"):
+                        value = value.decode("ascii")
+                    value = _python_UUID(value)
+                return value
+
+            return process
+        else:
+
+            def process(value):
+                if value is not None:
+                    if hasattr(value, "decode"):
+                        value = value.decode("ascii")
+                    value = str(_python_UUID(value))
+                return value
+
+            return process
+
+
 class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext):
     _lastrowid = None
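Editorial illustration of what the _MariaDBUUID workaround above does until CONPY-270 is fixed (not part of the diff): values for native UUID columns may come back from mariadbconnector as bytes, so they are decoded and coerced before being handed to the application.

import uuid

raw = b"5fa85f64-5717-4562-b3fc-2c963f66afa6"  # sample driver return value
value = raw.decode("ascii") if hasattr(raw, "decode") else raw
print(uuid.UUID(value))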
@@ -50,9 +80,20 @@ class MySQLExecutionContext_mariadbconnector(MySQLExecutionContext):
         return self._dbapi_connection.cursor(buffered=True)
 
     def post_exec(self):
         super().post_exec()
+
+        self._rowcount = self.cursor.rowcount
 
         if self.isinsert and self.compiled.postfetch_lastrowid:
             self._lastrowid = self.cursor.lastrowid
 
+    @property
+    def rowcount(self):
+        if self._rowcount is not None:
+            return self._rowcount
+        else:
+            return self.cursor.rowcount
+
     def get_lastrowid(self):
         return self._lastrowid
@@ -87,6 +128,10 @@ class MySQLDialect_mariadbconnector(MySQLDialect):
 
     supports_server_side_cursors = True
 
+    colspecs = util.update_copy(
+        MySQLDialect.colspecs, {sqltypes.Uuid: _MariaDBUUID}
+    )
+
     @util.memoized_property
     def _dbapi_version(self):
         if self.dbapi and hasattr(self.dbapi, "__version__"):
@@ -168,7 +168,7 @@ class MySQLDialect_mysqldb(MySQLDialect):
         return on_connect
 
     def do_ping(self, dbapi_connection):
-        dbapi_connection.ping(False)
+        dbapi_connection.ping()
         return True
 
     def do_executemany(self, cursor, statement, parameters, context=None):
@@ -74,6 +74,40 @@ class MySQLDialect_pymysql(MySQLDialect_mysqldb):
     def import_dbapi(cls):
         return __import__("pymysql")
 
+    @langhelpers.memoized_property
+    def _send_false_to_ping(self):
+        """determine if pymysql has deprecated, changed the default of,
+        or removed the 'reconnect' argument of connection.ping().
+
+        See #10492 and
+        https://github.com/PyMySQL/mysqlclient/discussions/651#discussioncomment-7308971
+        for background.
+
+        """ # noqa: E501
+
+        try:
+            Connection = __import__("pymysql.connections").Connection
+        except (ImportError, AttributeError):
+            return True
+        else:
+            insp = langhelpers.get_callable_argspec(Connection.ping)
+            try:
+                reconnect_arg = insp.args[1]
+            except IndexError:
+                return False
+            else:
+                return reconnect_arg == "reconnect" and (
+                    not insp.defaults or insp.defaults[0] is not False
+                )
+
+    def do_ping(self, dbapi_connection):
+        if self._send_false_to_ping:
+            dbapi_connection.ping(False)
+        else:
+            dbapi_connection.ping()
+
+        return True
+
     def create_connect_args(self, url, _translate_args=None):
         if _translate_args is None:
             _translate_args = dict(username="user")
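Editorial re-statement of the detection above (not part of the diff), written with the stdlib inspect module instead of SQLAlchemy's langhelpers and assuming pymysql is importable: if Connection.ping() still takes a positional "reconnect" argument whose default is not False, the dialect keeps passing False explicitly.

import inspect

from pymysql.connections import Connection

params = list(inspect.signature(Connection.ping).parameters.values())[1:]
send_false = bool(
    params
    and params[0].name == "reconnect"
    and params[0].default is not False
)
print(send_false)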
@@ -509,7 +509,7 @@ class MySQLTableDefinitionParser:
             r"\((?P<local>[^\)]+?)\) REFERENCES +"
             r"(?P<table>%(iq)s[^%(fq)s]+%(fq)s"
             r"(?:\.%(iq)s[^%(fq)s]+%(fq)s)?) +"
-            r"\((?P<foreign>[^\)]+?)\)"
+            r"\((?P<foreign>(?:%(iq)s[^%(fq)s]+%(fq)s(?: *, *)?)+)\)"
             r"(?: +(?P<match>MATCH \w+))?"
             r"(?: +ON DELETE (?P<ondelete>%(on)s))?"
             r"(?: +ON UPDATE (?P<onupdate>%(on)s))?" % kw
@@ -59,4 +59,5 @@ __all__ = (
     "VARCHAR2",
     "NVARCHAR2",
     "ROWID",
+    "REAL",
 )
@@ -1241,6 +1241,9 @@ class OracleCompiler(compiler.SQLCompiler):
             self.render_literal_value(flags, sqltypes.STRINGTYPE),
         )
 
+    def visit_aggregate_strings_func(self, fn, **kw):
+        return "LISTAGG%s" % self.function_argspec(fn, **kw)
+
 
 class OracleDDLCompiler(compiler.DDLCompiler):
     def define_constraint_cascades(self, constraint):
@@ -1315,8 +1318,9 @@ class OracleDDLCompiler(compiler.DDLCompiler):
         text = text.replace("NO MINVALUE", "NOMINVALUE")
         text = text.replace("NO MAXVALUE", "NOMAXVALUE")
         text = text.replace("NO CYCLE", "NOCYCLE")
-        text = text.replace("NO ORDER", "NOORDER")
-        return text
+        if identity_options.order is not None:
+            text += " ORDER" if identity_options.order else " NOORDER"
+        return text.strip()
 
     def visit_computed_column(self, generated, **kw):
         text = "GENERATED ALWAYS AS (%s)" % self.sql_compiler.process(
@@ -978,8 +978,8 @@ class OracleDialect_cx_oracle(OracleDialect):
 
     driver = "cx_oracle"
 
-    colspecs = OracleDialect.colspecs
-    colspecs.update(
+    colspecs = util.update_copy(
+        OracleDialect.colspecs,
         {
             sqltypes.TIMESTAMP: _CXOracleTIMESTAMP,
             sqltypes.Numeric: _OracleNumeric,
@@ -1006,7 +1006,7 @@ class OracleDialect_cx_oracle(OracleDialect):
             sqltypes.Uuid: _OracleUUID,
             oracle.NCLOB: _OracleUnicodeTextNCLOB,
             oracle.ROWID: _OracleRowid,
-        }
+        },
     )
 
     execute_sequence_format = list
@@ -1088,9 +1088,9 @@ class OracleDialect_cx_oracle(OracleDialect):
                     int(x) for x in m.group(1, 2, 3) if x is not None
                 )
                 self.cx_oracle_ver = version
-                if self.cx_oracle_ver < (7,) and self.cx_oracle_ver > (0, 0, 0):
+                if self.cx_oracle_ver < (8,) and self.cx_oracle_ver > (0, 0, 0):
                     raise exc.InvalidRequestError(
-                        "cx_Oracle version 7 and above are supported"
+                        "cx_Oracle version 8 and above are supported"
                     )
 
     @classmethod
@@ -4,12 +4,22 @@
 # This module is part of SQLAlchemy and is released under
 # the MIT License: https://www.opensource.org/licenses/mit-license.php
 # mypy: ignore-errors
+from __future__ import annotations
+
+import datetime as dt
+from typing import Optional
+from typing import Type
+from typing import TYPE_CHECKING
 
 from ... import exc
 from ...sql import sqltypes
 from ...types import NVARCHAR
 from ...types import VARCHAR
 
+if TYPE_CHECKING:
+    from ...engine.interfaces import Dialect
+    from ...sql.type_api import _LiteralProcessorType
+
 
 class RAW(sqltypes._Binary):
     __visit_name__ = "RAW"
@@ -116,38 +126,36 @@ class LONG(sqltypes.Text):
 class _OracleDateLiteralRender:
     def _literal_processor_datetime(self, dialect):
         def process(value):
-            if value is not None:
-                if getattr(value, "microsecond", None):
-                    value = (
-                        f"""TO_TIMESTAMP"""
-                        f"""('{value.isoformat().replace("T", " ")}', """
-                        """'YYYY-MM-DD HH24:MI:SS.FF')"""
-                    )
-                else:
-                    value = (
-                        f"""TO_DATE"""
-                        f"""('{value.isoformat().replace("T", " ")}', """
-                        """'YYYY-MM-DD HH24:MI:SS')"""
-                    )
+            if getattr(value, "microsecond", None):
+                value = (
+                    f"""TO_TIMESTAMP"""
+                    f"""('{value.isoformat().replace("T", " ")}', """
+                    """'YYYY-MM-DD HH24:MI:SS.FF')"""
+                )
+            else:
+                value = (
+                    f"""TO_DATE"""
+                    f"""('{value.isoformat().replace("T", " ")}', """
+                    """'YYYY-MM-DD HH24:MI:SS')"""
+                )
             return value
 
         return process
 
     def _literal_processor_date(self, dialect):
         def process(value):
-            if value is not None:
-                if getattr(value, "microsecond", None):
-                    value = (
-                        f"""TO_TIMESTAMP"""
-                        f"""('{value.isoformat().split("T")[0]}', """
-                        """'YYYY-MM-DD')"""
-                    )
-                else:
-                    value = (
-                        f"""TO_DATE"""
-                        f"""('{value.isoformat().split("T")[0]}', """
-                        """'YYYY-MM-DD')"""
-                    )
+            if getattr(value, "microsecond", None):
+                value = (
+                    f"""TO_TIMESTAMP"""
+                    f"""('{value.isoformat().split("T")[0]}', """
+                    """'YYYY-MM-DD')"""
+                )
+            else:
+                value = (
+                    f"""TO_DATE"""
+                    f"""('{value.isoformat().split("T")[0]}', """
+                    """'YYYY-MM-DD')"""
+                )
             return value
 
         return process
@@ -203,6 +211,15 @@ class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval):
             second_precision=interval.second_precision,
         )
 
+    @classmethod
+    def adapt_emulated_to_native(
+        cls, interval: sqltypes.Interval, **kw # type: ignore[override]
+    ):
+        return INTERVAL(
+            day_precision=interval.day_precision,
+            second_precision=interval.second_precision,
+        )
+
     @property
     def _type_affinity(self):
         return sqltypes.Interval
@@ -214,6 +231,18 @@ class INTERVAL(sqltypes.NativeForEmulated, sqltypes._AbstractInterval):
             day_precision=self.day_precision,
         )
 
+    @property
+    def python_type(self) -> Type[dt.timedelta]:
+        return dt.timedelta
+
+    def literal_processor(
+        self, dialect: Dialect
+    ) -> Optional[_LiteralProcessorType[dt.timedelta]]:
+        def process(value: dt.timedelta) -> str:
+            return f"NUMTODSINTERVAL({value.total_seconds()}, 'SECOND')"
+
+        return process
+
 
 class TIMESTAMP(sqltypes.TIMESTAMP):
     """Oracle implementation of ``TIMESTAMP``, which supports additional
@@ -205,6 +205,7 @@ from .base import PGExecutionContext
 from .base import PGIdentifierPreparer
 from .base import REGCLASS
 from .base import REGCONFIG
+from .types import BIT
 from .types import BYTEA
 from .types import CITEXT
 from ... import exc
@@ -237,6 +238,10 @@ class AsyncpgTime(sqltypes.Time):
     render_bind_cast = True
 
 
+class AsyncpgBit(BIT):
+    render_bind_cast = True
+
+
 class AsyncpgByteA(BYTEA):
     render_bind_cast = True
 
@@ -566,6 +571,7 @@ class AsyncAdapt_asyncpg_cursor:
     async def _executemany(self, operation, seq_of_parameters):
         adapt_connection = self._adapt_connection
 
+        self.description = None
         async with adapt_connection._execute_mutex:
             await adapt_connection._check_type_cache_invalidation(
                 self._invalidate_schema_cache_asof
@@ -818,10 +824,23 @@ class AsyncAdapt_asyncpg_connection(AdaptedConnection):
 
     def ping(self):
         try:
-            _ = self.await_(self._connection.fetchrow(";"))
+            _ = self.await_(self._async_ping())
         except Exception as error:
             self._handle_exception(error)
 
+    async def _async_ping(self):
+        if self._transaction is None and self.isolation_level != "autocommit":
+            # create a tranasction explicitly to support pgbouncer
+            # transaction mode. See #10226
+            tr = self._connection.transaction()
+            await tr.start()
+            try:
+                await self._connection.fetchrow(";")
+            finally:
+                await tr.rollback()
+        else:
+            await self._connection.fetchrow(";")
+
     def set_isolation_level(self, level):
         if self._started:
             self.rollback()
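Editorial context, not part of the diff: the adapted connection's ping() is the code path exercised by pool pre-ping on asyncpg engines, and the explicit transaction keeps pgbouncer's transaction-pooling mode from being confused by the test query (see #10226 referenced above). The URL is a placeholder.

from sqlalchemy.ext.asyncio import create_async_engine

engine = create_async_engine(
    "postgresql+asyncpg://scott:tiger@localhost/test",  # hypothetical DSN
    pool_pre_ping=True,
)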
@@ -1002,6 +1021,7 @@ class PGDialect_asyncpg(PGDialect):
         {
             sqltypes.String: AsyncpgString,
             sqltypes.ARRAY: AsyncpgARRAY,
+            BIT: AsyncpgBit,
             CITEXT: CITEXT,
             REGCONFIG: AsyncpgREGCONFIG,
             sqltypes.Time: AsyncpgTime,
@@ -303,7 +303,7 @@ Setting Alternate Search Paths on Connect
 ------------------------------------------
 
 The PostgreSQL ``search_path`` variable refers to the list of schema names
-that will be implicitly referred towards when a particular table or other
+that will be implicitly referenced when a particular table or other
 object is referenced in a SQL statement. As detailed in the next section
 :ref:`postgresql_schema_reflection`, SQLAlchemy is generally organized around
 the concept of keeping this variable at its default value of ``public``,
@@ -1376,8 +1376,8 @@ Built-in support for rendering a ``ROW`` may be approximated using
 Table Types passed to Functions
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
-PostgreSQL supports passing a table as an argument to a function, which it
-refers towards as a "record" type. SQLAlchemy :class:`_sql.FromClause` objects
+PostgreSQL supports passing a table as an argument to a function, which is
+known as a "record" type. SQLAlchemy :class:`_sql.FromClause` objects
 such as :class:`_schema.Table` support this special form using the
 :meth:`_sql.FromClause.table_valued` method, which is comparable to the
 :meth:`_functions.FunctionElement.table_valued` method except that the collection
@@ -1868,6 +1868,9 @@ class PGCompiler(compiler.SQLCompiler):
             value = value.replace("\\", "\\\\")
         return value
 
+    def visit_aggregate_strings_func(self, fn, **kw):
+        return "string_agg%s" % self.function_argspec(fn)
+
     def visit_sequence(self, seq, **kw):
         return "nextval('%s')" % self.preparer.format_sequence(seq)
 
@@ -4114,9 +4117,13 @@ class PGDialect(default.DefaultDialect):
 
     @util.memoized_property
     def _fk_regex_pattern(self):
+        # optionally quoted token
+        qtoken = '(?:"[^"]+"|[A-Za-z0-9_]+?)'
+
         # https://www.postgresql.org/docs/current/static/sql-createtable.html
         return re.compile(
-            r"FOREIGN KEY \((.*?)\) REFERENCES (?:(.*?)\.)?(.*?)\((.*?)\)"
+            r"FOREIGN KEY \((.*?)\) "
+            rf"REFERENCES (?:({qtoken})\.)?({qtoken})\(((?:{qtoken}(?: *, *)?)+)\)" # noqa: E501
             r"[\s]?(MATCH (FULL|PARTIAL|SIMPLE)+)?"
             r"[\s]?(ON UPDATE "
             r"(CASCADE|RESTRICT|NO ACTION|SET NULL|SET DEFAULT)+)?"
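Editorial demonstration of the quoted-token pattern introduced above (not part of the diff, and trimmed to the leading portion of the full regex): quoted identifiers that contain dots or commas are attributed to the correct schema/table/column groups.

import re

qtoken = '(?:"[^"]+"|[A-Za-z0-9_]+?)'
pattern = re.compile(
    r"FOREIGN KEY \((.*?)\) "
    rf"REFERENCES (?:({qtoken})\.)?({qtoken})\(((?:{qtoken}(?: *, *)?)+)\)"
)

condef = 'FOREIGN KEY (element_id) REFERENCES "my.schema"."my.table"("my.col")'
print(pattern.search(condef).groups())
# -> ('element_id', '"my.schema"', '"my.table"', '"my.col"')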
@@ -254,7 +254,7 @@ equivalent to psycopg2's ``execute_values()`` handler; an overview of this
 feature and its configuration are at :ref:`engine_insertmanyvalues`.
 
 .. versionadded:: 2.0 Replaced psycopg2's ``execute_values()`` fast execution
-   helper with a native SQLAlchemy mechanism referred towards as
+   helper with a native SQLAlchemy mechanism known as
    :ref:`insertmanyvalues <engine_insertmanyvalues>`.
 
 The psycopg2 dialect retains the ability to use the psycopg2-specific
@@ -293,7 +293,7 @@ class Range(Generic[_T]):
         else:
             return 0
 
-    def __eq__(self, other: Any) -> bool: # type: ignore[override] # noqa: E501
+    def __eq__(self, other: Any) -> bool:
         """Compare this range to the `other` taking into account
         bounds inclusivity, returning ``True`` if they are equal.
         """
@@ -18,7 +18,9 @@ from ...sql import type_api
 from ...util.typing import Literal
 
 if TYPE_CHECKING:
+    from ...engine.interfaces import Dialect
     from ...sql.operators import OperatorType
+    from ...sql.type_api import _LiteralProcessorType
     from ...sql.type_api import TypeEngine
 
 _DECIMAL_TYPES = (1231, 1700)
@@ -247,6 +249,14 @@ class INTERVAL(type_api.NativeForEmulated, sqltypes._AbstractInterval):
     def python_type(self) -> Type[dt.timedelta]:
         return dt.timedelta
 
+    def literal_processor(
+        self, dialect: Dialect
+    ) -> Optional[_LiteralProcessorType[dt.timedelta]]:
+        def process(value: dt.timedelta) -> str:
+            return f"make_interval(secs=>{value.total_seconds()})"
+
+        return process
+
 
 PGInterval = INTERVAL
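Editorial illustration, not part of the diff: with the literal_processor() added above, a timedelta can be inlined when a statement is compiled with literal_binds; the Oracle hunk further up gives its INTERVAL the analogous behaviour via NUMTODSINTERVAL.

import datetime as dt

from sqlalchemy import literal, select
from sqlalchemy.dialects import postgresql

stmt = select(literal(dt.timedelta(minutes=5), postgresql.INTERVAL))
print(
    stmt.compile(
        dialect=postgresql.dialect(),
        compile_kwargs={"literal_binds": True},
    )
)
# expected to render roughly: SELECT make_interval(secs=>300.0) AS anon_1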
Some files were not shown because too many files have changed in this diff.