diff --git a/pyrightconfig.stricter.json b/pyrightconfig.stricter.json
index 0f9e8a25856b..54f02cc9feca 100644
--- a/pyrightconfig.stricter.json
+++ b/pyrightconfig.stricter.json
@@ -76,6 +76,7 @@
         "stubs/peewee",
         "stubs/pexpect",
         "stubs/pika",
+        "stubs/pony",
         "stubs/protobuf",
         "stubs/psutil",
         "stubs/psycopg2",
diff --git a/stubs/pony/@tests/stubtest_allowlist.txt b/stubs/pony/@tests/stubtest_allowlist.txt
new file mode 100644
index 000000000000..5592e76d682c
--- /dev/null
+++ b/stubs/pony/@tests/stubtest_allowlist.txt
@@ -0,0 +1,14 @@
+# Tests should not be part of the stubs
+pony.orm.tests.*
+
+# Modules that fail to import because their third-party dependencies are not installed:
+pony.flask.*
+pony.orm.dbproviders.cockroach
+pony.orm.dbproviders.mysql
+pony.orm.dbproviders.oracle
+pony.orm.dbproviders.postgres
+pony.orm.integration.bottle_plugin
+pony.orm.examples.bottle_example
+
+# TODO: Stubs for the examples dir are still incomplete:
+pony.orm.examples.*
diff --git a/stubs/pony/METADATA.toml b/stubs/pony/METADATA.toml
new file mode 100644
index 000000000000..4110f557f94f
--- /dev/null
+++ b/stubs/pony/METADATA.toml
@@ -0,0 +1,3 @@
+version = "0.7.*"
+upstream_repository = "https://github.com/ponyorm/pony"
+requires = ["types-psycopg2", "types-PyMySQL"]
diff --git a/stubs/pony/pony/__init__.pyi b/stubs/pony/pony/__init__.pyi
new file mode 100644
index 000000000000..0bcdbcbd0c60
--- /dev/null
+++ b/stubs/pony/pony/__init__.pyi
@@ -0,0 +1,14 @@
+from typing import Final, Literal
+from typing_extensions import TypeAlias
+
+_Mode: TypeAlias = Literal[
+    "GAE-LOCAL", "GAE-SERVER", "MOD_WSGI", "INTERACTIVE", "FCGI-FLUP", "UWSGI", "FLASK", "CHERRYPY", "BOTTLE", "UNKNOWN"
+]
+__version__: Final[str]
+
+def detect_mode() -> _Mode: ...
+
+MODE: Final[_Mode]
+MAIN_FILE: Final[str | None]
+MAIN_DIR: Final[str | None]
+PONY_DIR: Final[str]
diff --git a/stubs/pony/pony/converting.pyi b/stubs/pony/pony/converting.pyi
new file mode 100644
index 000000000000..631da5e69398
--- /dev/null
+++ b/stubs/pony/pony/converting.pyi
@@ -0,0 +1,48 @@
+import re
+from _typeshed import ConvertibleToInt
+from collections.abc import Callable, Sequence
+from datetime import date, datetime, time, timedelta
+from typing import Any, Literal
+
+class ValidationError(ValueError): ...
+
+def check_ip(s: str) -> str: ...
+def check_positive(s: ConvertibleToInt) -> int: ...
+def check_identifier(s: str) -> str: ...
+
+isbn_re: re.Pattern[str]
+
+def isbn10_checksum(digits: Sequence[ConvertibleToInt]) -> str: ...
+def isbn13_checksum(digits: Sequence[ConvertibleToInt]) -> str: ...
+def check_isbn(s: str, convert_to: Literal[10, 13] | None = None) -> str: ...
+def isbn10_to_isbn13(s: str) -> str: ...
+def isbn13_to_isbn10(s: str) -> str: ...
+
+email_re: re.Pattern[str]
+rfc2822_email_re: re.Pattern[str]
+
+def check_email(s: str) -> str: ...
+def check_rfc2822_email(s: str) -> str: ...
+
+date_str_list: list[str]
+date_re_list: list[re.Pattern[str]]
+time_str: str
+time_re: re.Pattern[str]
+datetime_re_list: list[re.Pattern[str]]
+month_lists: list[list[str]]
+month_list: list[str]
+i: int
+month: str
+month_dict: dict[str, int]
+
+def str2date(s: str) -> date: ...
+def str2time(s: str) -> time: ...
+def str2datetime(s: str) -> datetime: ...
+def str2timedelta(s: str) -> timedelta: ...
+def timedelta2str(td: timedelta) -> str: ...
+
+converters: dict[type | str, tuple[Callable[[str], Any], type[str], str | None]]  # Any type from types above
+
+def str2py(
+    value: str, type: str | type | tuple[Callable[[str], Any], type[str], str | None] | None
+) -> Any: ...
# Any type from types above diff --git a/stubs/pony/pony/flask/__init__.pyi b/stubs/pony/pony/flask/__init__.pyi new file mode 100644 index 000000000000..ce220f1b7d60 --- /dev/null +++ b/stubs/pony/pony/flask/__init__.pyi @@ -0,0 +1,17 @@ +from _typeshed import Incomplete +from types import ModuleType +from typing import Protocol + +# Protocol for flask.Flask class +class _Flask(Protocol): + def before_request(self, f): ... + def after_request(self, f): ... + def teardown_request(self, f): ... + +flask_lib: ModuleType +request: Incomplete + +class Pony: + app: _Flask | None + def __init__(self, app: _Flask | None = None) -> None: ... + def init_app(self, app: _Flask) -> None: ... diff --git a/stubs/pony/pony/flask/example/__init__.pyi b/stubs/pony/pony/flask/example/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/pony/pony/flask/example/app.pyi b/stubs/pony/pony/flask/example/app.pyi new file mode 100644 index 000000000000..b45476eba0d9 --- /dev/null +++ b/stubs/pony/pony/flask/example/app.pyi @@ -0,0 +1,8 @@ +from _typeshed import Incomplete + +from pony.flask import _Flask + +app: _Flask +login_manager: Incomplete + +def load_user(user_id): ... diff --git a/stubs/pony/pony/flask/example/config.pyi b/stubs/pony/pony/flask/example/config.pyi new file mode 100644 index 000000000000..0a7fba4bec91 --- /dev/null +++ b/stubs/pony/pony/flask/example/config.pyi @@ -0,0 +1 @@ +config: dict[str, bool | str | dict[str, str | bool]] diff --git a/stubs/pony/pony/flask/example/models.pyi b/stubs/pony/pony/flask/example/models.pyi new file mode 100644 index 000000000000..290f90e958b2 --- /dev/null +++ b/stubs/pony/pony/flask/example/models.pyi @@ -0,0 +1,10 @@ +from datetime import datetime + +from pony.orm.core import Database, Entity + +db: Database + +class User(Entity): + login: str + password: str + last_login: datetime | None diff --git a/stubs/pony/pony/flask/example/views.pyi b/stubs/pony/pony/flask/example/views.pyi new file mode 100644 index 000000000000..2043cc5ca582 --- /dev/null +++ b/stubs/pony/pony/flask/example/views.pyi @@ -0,0 +1,4 @@ +def index() -> str: ... +def login(): ... +def reg(): ... +def logout(): ... 
diff --git a/stubs/pony/pony/options.pyi b/stubs/pony/pony/options.pyi new file mode 100644 index 000000000000..c65f52a795c0 --- /dev/null +++ b/stubs/pony/pony/options.pyi @@ -0,0 +1,39 @@ +from typing import Final + +DEBUG: Final[bool] +STATIC_DIR: Final[None] +CUT_TRACEBACK: Final[bool] +STD_DOCTYPE: Final[str] +STD_STYLESHEETS: Final[list[tuple[str, ...]]] +BASE_STYLESHEETS_PLACEHOLDER: Final[str] +COMPONENT_STYLESHEETS_PLACEHOLDER: Final[str] +SCRIPTS_PLACEHOLDER: Final[str] +RELOADING_CHECK_INTERVAL: Final[float] +LOG_TO_SQLITE: Final[None] +LOGGING_LEVEL: Final[None] +LOGGING_PONY_LEVEL: Final[None] +MAX_SESSION_CTIME: Final[int] +MAX_SESSION_MTIME: Final[int] +MAX_LONGLIFE_SESSION: Final[int] +COOKIE_SERIALIZATION_TYPE: Final[str] +COOKIE_NAME: Final[str] +COOKIE_PATH: Final[str] +COOKIE_DOMAIN: Final[None] +HASH_ALGORITHM: Final[None] +SESSION_STORAGE: Final[None] +MEMCACHE: Final[None] +ALTERNATIVE_SESSION_MEMCACHE: Final[None] +ALTERNATIVE_ORM_MEMCACHE: Final[None] +ALTERNATIVE_TEMPLATING_MEMCACHE: Final[None] +ALTERNATIVE_RESPONSE_MEMCACHE: Final[None] +PICKLE_START_OFFSET: Final[int] +PICKLE_HTML_AS_PLAIN_STR: Final[bool] +RESTORE_ESCAPES: Final[bool] +SOURCE_ENCODING: Final[None] +CONSOLE_ENCODING: Final[None] +MAX_FETCH_COUNT: Final[None] +CONSOLE_WIDTH: Final[int] +SIMPLE_ALIASES: Final[bool] +INNER_JOIN_SYNTAX: Final[bool] +DEBUGGING_REMOVE_ADDR: Final[bool] +DEBUGGING_RESTORE_ESCAPES: Final[bool] diff --git a/stubs/pony/pony/orm/__init__.pyi b/stubs/pony/pony/orm/__init__.pyi new file mode 100644 index 000000000000..e1dd6b0275cb --- /dev/null +++ b/stubs/pony/pony/orm/__init__.pyi @@ -0,0 +1 @@ +from pony.orm.core import * diff --git a/stubs/pony/pony/orm/asttranslation.pyi b/stubs/pony/pony/orm/asttranslation.pyi new file mode 100644 index 000000000000..6b3cfcc0e7fe --- /dev/null +++ b/stubs/pony/pony/orm/asttranslation.pyi @@ -0,0 +1,139 @@ +import ast +import sys +from _typeshed import Incomplete +from collections.abc import Callable, Generator +from typing import Any, TypeVar + +_T = TypeVar("_T") + +class TranslationError(Exception): ... + +pre_method_caches: dict[type[ASTTranslator], dict[type[ast.AST], Callable[..., Any]]] +post_method_caches: dict[type[ASTTranslator], dict[type[ast.AST], Callable[..., Any]]] + +class ASTTranslator: + tree: Incomplete + def __init__(translator, tree) -> None: ... + def dispatch(translator, node: ast.AST) -> None: ... + def call(translator, method: Callable[[ASTTranslator, ast.AST], _T], node: ast.AST) -> _T | None: ... + def default_pre(translator, node: ast.AST) -> None: ... + def default_post(translator, node: ast.AST) -> None: ... + +def priority(p: int): ... +def binop_src(op: str, node) -> str: ... +def ast2src(tree): ... +def get_child_nodes(node: ast.AST) -> Generator[ast.AST]: ... + +class PythonTranslator(ASTTranslator): + def __init__(translator, tree) -> None: ... + def call(translator, method, node) -> None: ... + def default_pre(translator, node: ast.AST): ... + def default_post(translator, node: ast.AST) -> None: ... + def postGeneratorExp(translator, node: ast.GeneratorExp) -> str: ... + def postcomprehension(translator, node: ast.comprehension) -> str: ... + def postGenExprIf(translator, node) -> str: ... + def postExpr(translator, node: ast.Expr) -> str: ... + def postIfExp(translator, node: ast.IfExp) -> str: ... + def postLambda(translator, node: ast.Lambda) -> str: ... + def postarguments(translator, node: ast.arguments) -> str: ... + def postarg(translator, node: ast.arg) -> str: ... 
+ def postOr(translator, node: ast.Or) -> str: ... + def postAnd(translator, node: ast.And) -> str: ... + def postNot(translator, node: ast.Not) -> str: ... + def postCompare(translator, node: ast.Compare) -> str: ... + def postEq(translator, node: ast.Eq) -> str: ... + def postNotEq(translator, node: ast.NotEq) -> str: ... + def postLt(translator, node: ast.Lt) -> str: ... + def postLtE(translator, node: ast.LtE) -> str: ... + def postGt(translator, node: ast.Gt) -> str: ... + def postGtE(translator, node: ast.GtE) -> str: ... + def postIs(translator, node: ast.Is) -> str: ... + def postIsNot(translator, node: ast.IsNot) -> str: ... + def postIn(translator, node: ast.In) -> str: ... + def postNotIn(translator, node: ast.NotIn) -> str: ... + def postBitOr(translator, node: ast.BitOr) -> str: ... + def postBitXor(translator, node: ast.BitXor) -> str: ... + def postBitAnd(translator, node: ast.BitAnd) -> str: ... + def postLShift(translator, node: ast.LShift) -> str: ... + def postRShift(translator, node: ast.RShift) -> str: ... + def postAdd(translator, node: ast.Add) -> str: ... + def postSub(translator, node: ast.Sub) -> str: ... + def postMult(translator, node: ast.Mult): ... + def postMatMult(translator, node: ast.MatMult) -> None: ... + def postDiv(translator, node: ast.Div) -> str: ... + def postFloorDiv(translator, node: ast.FloorDiv) -> str: ... + def postMod(translator, node: ast.Mod) -> str: ... + def postUSub(translator, node: ast.USub) -> str: ... + def postUAdd(translator, node: ast.UAdd) -> str: ... + def postInvert(translator, node: ast.Invert) -> str: ... + def postPow(translator, node: ast.Pow) -> str: ... + def postAttribute(translator, node: ast.Attribute) -> str: ... + def postCall(translator, node: ast.Call) -> str: ... + def postkeyword(translator, node: ast.keyword) -> str: ... + def postStarred(translator, node: ast.Starred) -> str: ... + def postSubscript(translator, node: ast.Subscript) -> str: ... + def postIndex(translator, node: ast.Index) -> str: ... + def postSlice(translator, node: ast.Slice) -> str: ... + def postConstant(translator, node: ast.Constant) -> str: ... + if sys.version_info >= (3, 14): + def postNameConstant(translator, node: ast.Constant) -> str: ... + def postNum(translator, node: ast.Constant) -> str: ... + def postStr(translator, node: ast.Constant) -> str: ... + def postBytes(translator, node: ast.Constant) -> str: ... + else: + def postNameConstant(translator, node: ast.NameConstant) -> str: ... + def postNum(translator, node: ast.Num) -> str: ... + def postStr(translator, node: ast.Str) -> str: ... + def postBytes(translator, node: ast.Bytes) -> str: ... + + def postList(translator, node: ast.List) -> str: ... + def postTuple(translator, node: ast.Tuple) -> str: ... + def postDict(translator, node: ast.Dict) -> str: ... + def postSet(translator, node: ast.Set) -> str: ... + def postName(translator, node: ast.Name) -> str: ... + def postJoinedStr(self, node: ast.JoinedStr) -> str: ... + def postFormattedValue(self, node: ast.FormattedValue) -> str: ... + +nonexternalizable_types: tuple[type[ast.AST], ...] + +class PreTranslator(ASTTranslator): + def __init__(translator, tree, globals, locals, special_functions, const_functions, outer_names=()) -> None: ... + def dispatch(translator, node) -> None: ... + def preGeneratorExp(translator, node: ast.GeneratorExp) -> bool: ... + def preLambda(translator, node: ast.Lambda) -> bool: ... + def postName(translator, node: ast.Name) -> None: ... 
+ def postSlice(translator, node: ast.Slice) -> None: ... + def postStarred(translator, node: ast.Starred) -> None: ... + def postConstant(translator, node: ast.Constant) -> None: ... + if sys.version_info >= (3, 14): + def postNum(translator, node: ast.Constant) -> None: ... + def postStr(translator, node: ast.Constant) -> None: ... + def postBytes(translator, node: ast.Constant) -> None: ... + else: + def postNum(translator, node: ast.Num) -> None: ... + def postStr(translator, node: ast.Str) -> None: ... + def postBytes(translator, node: ast.Bytes) -> None: ... + + def postDict(translator, node: ast.Dict) -> None: ... + def postList(translator, node: ast.List) -> None: ... + def postkeyword(translator, node: ast.keyword) -> None: ... + def postIndex(translator, node: ast.Index) -> None: ... + def postCall(translator, node: ast.Call) -> None: ... + def postCompare(translator, node: ast.Compare) -> None: ... + def post_binop(translator, node: ast.BinOp) -> None: ... + def postBitOr(translator, node: ast.BitOr) -> None: ... + def postBitXor(translator, node: ast.BitXor) -> None: ... + def postBitAnd(translator, node: ast.BitAnd) -> None: ... + def postLShift(translator, node: ast.LShift) -> None: ... + def postRShift(translator, node: ast.RShift) -> None: ... + def postAdd(translator, node: ast.Add) -> None: ... + def postSub(translator, node: ast.Sub) -> None: ... + def postMult(translator, node: ast.Mult) -> None: ... + def postMatMult(translator, node: ast.MatMult) -> None: ... + def postDiv(translator, node: ast.Div) -> None: ... + def postFloorDiv(translator, node: ast.FloorDiv) -> None: ... + def postMod(translator, node: ast.Mod) -> None: ... + +extractors_cache: dict[str | int, tuple[Incomplete, dict[Incomplete, Incomplete]]] + +def create_extractors(code_key: str | int, tree, globals, locals, special_functions, const_functions, outer_names=()): ... 
diff --git a/stubs/pony/pony/orm/core.pyi b/stubs/pony/pony/orm/core.pyi new file mode 100644 index 000000000000..d158530884ea --- /dev/null +++ b/stubs/pony/pony/orm/core.pyi @@ -0,0 +1,823 @@ +import ast +import itertools +import re +import types +from _typeshed import Incomplete +from collections import defaultdict +from collections.abc import Callable, Generator +from logging import Logger +from typing import Literal, NoReturn, TypeVar +from typing_extensions import Self, TypeAlias, deprecated + +import pony as pony +from pony.orm.asttranslation import TranslationError as TranslationError +from pony.orm.dbapiprovider import ( + DatabaseError as DatabaseError, + DataError as DataError, + DBException as DBException, + Error as Error, + IntegrityError as IntegrityError, + InterfaceError as InterfaceError, + InternalError as InternalError, + NotSupportedError as NotSupportedError, + OperationalError as OperationalError, + ProgrammingError as ProgrammingError, + Warning as Warning, +) +from pony.orm.ormtypes import ( + Array, + FloatArray as FloatArray, + IntArray as IntArray, + Json as Json, + LongStr as LongStr, + LongUnicode as LongUnicode, + StrArray as StrArray, + raw_sql as raw_sql, +) +from pony.py23compat import buffer as buffer, unicode as unicode +from pony.utils import between as between, coalesce as coalesce, concat as concat, localbase + +_T = TypeVar("_T") +_KnownProvider: TypeAlias = Literal["sqlite", "postgres", "mysql", "oracle"] + +__all__ = [ + "pony", + "DBException", + "RowNotFound", + "MultipleRowsFound", + "TooManyRowsFound", + "Warning", + "Error", + "InterfaceError", + "DatabaseError", + "DataError", + "OperationalError", + "IntegrityError", + "InternalError", + "ProgrammingError", + "NotSupportedError", + "OrmError", + "ERDiagramError", + "DBSchemaError", + "MappingError", + "BindingError", + "TableDoesNotExist", + "TableIsNotEmpty", + "ConstraintError", + "CacheIndexError", + "ObjectNotFound", + "MultipleObjectsFoundError", + "TooManyObjectsFoundError", + "OperationWithDeletedObjectError", + "TransactionError", + "ConnectionClosedError", + "TransactionIntegrityError", + "IsolationError", + "CommitException", + "RollbackException", + "UnrepeatableReadError", + "OptimisticCheckError", + "UnresolvableCyclicDependency", + "UnexpectedError", + "DatabaseSessionIsOver", + "PonyRuntimeWarning", + "DatabaseContainsIncorrectValue", + "DatabaseContainsIncorrectEmptyValue", + "TranslationError", + "ExprEvalError", + "PermissionError", + "Database", + "sql_debug", + "set_sql_debug", + "sql_debugging", + "show", + "PrimaryKey", + "Required", + "Optional", + "Set", + "Discriminator", + "composite_key", + "composite_index", + "flush", + "commit", + "rollback", + "db_session", + "with_transaction", + "make_proxy", + "LongStr", + "LongUnicode", + "Json", + "IntArray", + "StrArray", + "FloatArray", + "select", + "left_join", + "get", + "exists", + "delete", + "count", + "sum", + "min", + "max", + "avg", + "group_concat", + "distinct", + "JOIN", + "desc", + "between", + "concat", + "coalesce", + "raw_sql", + "buffer", + "unicode", + "get_current_user", + "set_current_user", + "perm", + "has_perm", + "get_user_groups", + "get_user_roles", + "get_object_labels", + "user_groups_getter", + "user_roles_getter", + "obj_labels_getter", +] + +suppress_debug_change: bool + +def sql_debug(value: bool) -> None: ... +def set_sql_debug(debug: bool = True, show_values=None) -> None: ... + +orm_logger: Logger +sql_logger: Logger +orm_log_level: int + +def log_orm(msg: object) -> None: ... 
+def args2str(args: list[object] | tuple[object, ...] | dict[object, object]) -> str: ...
+
+class OrmError(Exception): ...
+class ERDiagramError(OrmError): ...
+class DBSchemaError(OrmError): ...
+class MappingError(OrmError): ...
+class BindingError(OrmError): ...
+class TableDoesNotExist(OrmError): ...
+class TableIsNotEmpty(OrmError): ...
+class ConstraintError(OrmError): ...
+class CacheIndexError(OrmError): ...
+class RowNotFound(OrmError): ...
+class MultipleRowsFound(OrmError): ...
+class TooManyRowsFound(OrmError): ...
+class PermissionError(OrmError): ...
+
+class ObjectNotFound(OrmError):
+    def __init__(
+        exc, entity: Entity, pkval: object | tuple[object, ...] | None = None  # pkval is passed to the repr() builtin
+    ) -> None: ...
+
+class MultipleObjectsFoundError(OrmError): ...
+class TooManyObjectsFoundError(OrmError): ...
+class OperationWithDeletedObjectError(OrmError): ...
+class TransactionError(OrmError): ...
+class ConnectionClosedError(TransactionError): ...
+
+class TransactionIntegrityError(TransactionError):
+    def __init__(exc, msg, original_exc=None) -> None: ...
+
+class CommitException(TransactionError):
+    def __init__(exc, msg, exceptions) -> None: ...
+
+class PartialCommitException(TransactionError):
+    def __init__(exc, msg, exceptions) -> None: ...
+
+class RollbackException(TransactionError):
+    def __init__(exc, msg, exceptions) -> None: ...
+
+class DatabaseSessionIsOver(TransactionError): ...
+
+TransactionRolledBack = DatabaseSessionIsOver
+
+class IsolationError(TransactionError): ...
+class UnrepeatableReadError(IsolationError): ...
+class OptimisticCheckError(IsolationError): ...
+class UnresolvableCyclicDependency(TransactionError): ...
+
+class UnexpectedError(TransactionError):
+    def __init__(exc, msg, original_exc) -> None: ...
+
+class ExprEvalError(TranslationError):
+    def __init__(exc, src, cause) -> None: ...
+
+class PonyInternalException(Exception): ...
+class OptimizationFailed(PonyInternalException): ...
+
+class UseAnotherTranslator(PonyInternalException):
+    translator: Incomplete
+    def __init__(self, translator) -> None: ...
+
+class PonyRuntimeWarning(RuntimeWarning): ...
+class DatabaseContainsIncorrectValue(PonyRuntimeWarning): ...
+class DatabaseContainsIncorrectEmptyValue(DatabaseContainsIncorrectValue): ...
+
+class PrefetchContext:
+    database: Incomplete
+    attrs_to_prefetch_dict: Incomplete
+    entities_to_prefetch: Incomplete
+    relations_to_prefetch_cache: Incomplete
+    def __init__(self, database=None) -> None: ...
+    def copy(self): ...
+    def __enter__(self) -> None: ...
+    def __exit__(
+        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
+    ) -> None: ...
+    def get_frozen_attrs_to_prefetch(self, entity): ...
+    def get_relations_to_prefetch(self, entity): ...
+
+class Local(localbase):
+    def __init__(local) -> None: ...
+    @property
+    def prefetch_context(local): ...
+    def push_debug_state(local, debug, show_values) -> None: ...
+    def pop_debug_state(local) -> None: ...
+
+local: Local
+
+def flush() -> None: ...
+def commit() -> None: ...
+def rollback() -> None: ...
+
+class DBSessionContextManager:
+    retry: int
+    ddl: bool
+    serializable: bool
+    immediate: bool
+    strict: bool
+    optimistic: bool
+    retry_exceptions: tuple[type[Exception], ...]
+    allowed_exceptions: tuple[type[Exception], ...]
+ sql_debug: bool | None + show_values: bool | None + def __init__( + db_session, + retry: int = 0, + immediate: bool = False, + ddl: bool = False, + serializable: bool = False, + strict: bool = False, + optimistic: bool = True, + retry_exceptions: tuple[type[Exception], ...] = ..., + allowed_exceptions: tuple[type[Exception], ...] = (), + sql_debug: bool | None = None, + show_values: bool | None = None, + ) -> None: ... + def __call__(db_session, *args, **kwargs): ... + def __enter__(db_session) -> None: ... + def __exit__(db_session, exc_type=None, exc=None, tb=None) -> None: ... + +db_session: DBSessionContextManager + +class SQLDebuggingContextManager: + debug: bool + show_values: Incomplete + def __init__(self, debug: bool = True, show_values=None) -> None: ... + def __call__(self, *args, **kwargs): ... + def __enter__(self) -> None: ... + def __exit__(self, exc_type=None, exc=None, tb=None) -> None: ... + +sql_debugging: SQLDebuggingContextManager + +def throw_db_session_is_over(action: str, obj: Entity, attr: Attribute | None = None) -> NoReturn: ... +@deprecated("@with_transaction decorator is deprecated, use @db_session decorator instead.") +def with_transaction(*args, **kwargs): ... + +known_providers: tuple[_KnownProvider, ...] + +class OnConnectDecorator: + @staticmethod + def check_provider(provider: str | None) -> None: ... + provider: _KnownProvider | None + database: Incomplete + def __init__(self, database: Database, provider: str | None) -> None: ... + def __call__(self, func: types.FunctionType | None = None, provider: str | None = None) -> Self: ... + +db_id_counter: itertools.count[int] + +class Database: + def __deepcopy__(self, memo) -> Self: ... + id: Incomplete + priority: int + entities: Incomplete + schema: Incomplete + Entity: type[Entity] + on_connect: OnConnectDecorator + provider: Incomplete + def __init__(self, *args, **kwargs) -> None: ... + def call_on_connect(database, con) -> None: ... + def bind(self, *args, **kwargs) -> None: ... + @property + def last_sql(database): ... + @property + def local_stats(database): ... + def merge_local_stats(database) -> None: ... + @property + def global_stats(database): ... + @property + @deprecated("global_stats_lock is deprecated, just use global_stats property without any locking.") + def global_stats_lock(database): ... + def get_connection(database): ... + def disconnect(database) -> None: ... + def flush(database) -> None: ... + def commit(database) -> None: ... + def rollback(database) -> None: ... + def execute(database, sql, globals=None, locals=None): ... + def select(database, sql, globals=None, locals=None, frame_depth: int = 0): ... + def get(database, sql, globals=None, locals=None): ... + def exists(database, sql, globals=None, locals=None): ... + def insert(database, table_name, returning=None, **kwargs): ... + def generate_mapping(database, filename=None, check_tables: bool = True, create_tables: bool = False): ... + def drop_table(database, table_name, if_exists: bool = False, with_all_data: bool = False) -> None: ... + def drop_all_tables(database, with_all_data: bool = False) -> None: ... + def create_tables(database, check_tables: bool = False) -> None: ... + def check_tables(database) -> None: ... + def set_perms_for(database, *entities) -> Generator[None]: ... + def to_json(database, data, include=(), exclude=(), converter=None, with_schema: bool = True, schema_hash=None): ... + def from_json(database, changes, observer=None): ... + +def basic_converter(x): ... 
+def perm(*args, **kwargs) -> AccessRule: ... +def pop_names_from_kwargs(typename, kwargs, *kwnames): ... + +class AccessRule: + def __init__(rule, database, entities, permissions, groups, roles, labels) -> None: ... + def exclude(rule, *args) -> None: ... + +def has_perm(user, perm, x) -> bool: ... +def can_view(user, x) -> bool: ... +def can_edit(user, x) -> bool: ... +def can_create(user, x) -> bool: ... +def can_delete(user, x) -> bool: ... +def get_current_user(): ... +def set_current_user(user) -> None: ... + +anybody_frozenset: frozenset[str] + +def get_user_groups(user): ... +def get_user_roles(user, obj): ... +def get_object_labels(obj): ... + +usergroup_functions: list[Incomplete] + +def user_groups_getter(cls=None): ... + +userrole_functions: list[Incomplete] + +def user_roles_getter(user_cls=None, obj_cls=None): ... + +objlabel_functions: list[Incomplete] + +def obj_labels_getter(cls=None): ... + +class DbLocal(localbase): + stats: Incomplete + last_sql: Incomplete + def __init__(dblocal) -> None: ... + +class QueryStat: + def __init__(stat, sql, duration=None) -> None: ... + def copy(stat): ... + def query_executed(stat, duration) -> None: ... + def merge(stat, stat2) -> None: ... + @property + def avg_time(stat): ... + +num_counter: itertools.count[int] + +class SessionCache: + is_alive: bool + num: int + database: Database + objects: set[Incomplete] + indexes: defaultdict[Incomplete, dict[Incomplete, Incomplete]] | None + seeds: defaultdict[Incomplete, set[Incomplete]] | None + max_id_cache: dict[Incomplete, Incomplete] | None + collection_statistics: dict[Incomplete, Incomplete] | None + for_update: set[Incomplete] | None + noflush_counter: int + modified_collections: defaultdict[Incomplete, set[Incomplete]] | None + objects_to_save: list[Incomplete] | None + saved_objects: list[Incomplete] | None + query_results: dict[Incomplete, Incomplete] | None + dbvals_deduplication_cache: defaultdict[Incomplete, dict[Incomplete, Incomplete]] | None + modified: bool + db_session: Incomplete + immediate: bool + connection: Incomplete + in_transaction: bool + saved_fk_state: Incomplete + perm_cache: Incomplete + user_roles_cache: defaultdict[Incomplete, dict[Incomplete, Incomplete]] | None + obj_labels_cache: dict[Incomplete, Incomplete] | None + def __init__(cache, database: Database) -> None: ... + def connect(cache): ... + def reconnect(cache, exc): ... + def prepare_connection_for_query_execution(cache): ... + def flush_and_commit(cache) -> None: ... + def commit(cache) -> None: ... + def rollback(cache) -> None: ... + def release(cache) -> None: ... + def close(cache, rollback: bool = True) -> None: ... + def flush_disabled(cache) -> Generator[None]: ... + def flush(cache) -> None: ... + def call_after_save_hooks(cache) -> None: ... + def update_simple_index(cache, obj, attr, old_val, new_val, undo) -> None: ... + def db_update_simple_index(cache, obj, attr, old_dbval, new_dbval) -> None: ... + def update_composite_index(cache, obj, attrs, prev_vals, new_vals, undo) -> None: ... + def db_update_composite_index(cache, obj, attrs, prev_vals, new_vals) -> None: ... + +class NotLoadedValueType: ... + +NOT_LOADED: NotLoadedValueType + +class DefaultValueType: ... + +DEFAULT: DefaultValueType + +class DescWrapper: + attr: Attribute + def __init__(self, attr: Attribute) -> None: ... + def __call__(self) -> Self: ... + def __eq__(self, other: object) -> bool: ... + def __ne__(self, other: object) -> bool: ... + def __hash__(self) -> int: ... 
+ +attr_id_counter: itertools.count[int] + +class Attribute: + nullable: bool | None + is_required: bool + is_discriminator: bool + is_unique: bool | None + is_part_of_unique_index: bool | None + is_pk: bool + is_collection: bool + is_relation: bool + is_basic: bool + is_string: bool + is_volatile: bool + is_implicit: bool + id: int + pk_offset: int | None + pk_columns_offset: int + py_type: type | str | types.FunctionType | Array + sql_type: Incomplete + entity: Incomplete + name: Incomplete + lazy: bool + lazy_sql_cache: Incomplete + args: tuple[Incomplete, ...] + auto: bool + default: Incomplete + reverse: str | Attribute | None + composite_keys: list[tuple[Incomplete, int]] + column: str | None + columns: list[str] | tuple[str, ...] + col_paths: list[Incomplete] + converters: list[Incomplete] + kwargs: dict[str, Incomplete] + cascade_delete: bool | None + index: str | bool | None + reverse_index: Incomplete + original_default: Incomplete + sql_default: str | bool | None + py_check: Callable[..., bool] | None + hidden: bool + optimistic: bool | None + fk_name: str | None + type_has_empty_value: bool + interleave: bool | None + def __deepcopy__(attr, memo): ... + def __init__(attr, py_type: type | str | types.FunctionType | Array, *args, **kwargs) -> None: ... + def linked(attr) -> None: ... + def __lt__(attr, other): ... + def validate(attr, val, obj=None, entity=None, from_db: bool = False): ... + def parse_value(attr, row, offsets, dbvals_deduplication_cache): ... + def load(attr, obj: Entity): ... + def __get__(attr, obj, cls=None): ... + def get(attr, obj): ... + def __set__(attr, obj, new_val, undo_funcs=None) -> None: ... + def db_set(attr, obj, new_dbval, is_reverse_call: bool = False) -> None: ... + def update_reverse(attr, obj, old_val, new_val, undo_funcs) -> None: ... + def db_update_reverse(attr, obj, old_dbval, new_dbval) -> None: ... + def __delete__(attr, obj) -> None: ... + def get_raw_values(attr, val): ... + def get_columns(attr) -> list[str] | tuple[str, ...]: ... + @property + def asc(attr) -> Self: ... + @property + def desc(attr) -> DescWrapper: ... + def describe(attr) -> str: ... + +class Optional(Attribute): ... + +class Required(Attribute): + def validate(attr, val, obj=None, entity=None, from_db: bool = False): ... + +class Discriminator(Required): + code2cls: dict[Incomplete, Incomplete] + def __init__(attr, py_type, *args, **kwargs) -> None: ... + @staticmethod + def create_default_attr(entity) -> None: ... + def process_entity_inheritance(attr, entity) -> None: ... + def validate(attr, val, obj=None, entity=None, from_db: bool = False): ... + def load(attr, obj) -> None: ... + def __get__(attr, obj, cls=None): ... + def __set__(attr, obj, new_val) -> None: ... # type: ignore[override] + def db_set(attr, obj, new_dbval) -> None: ... # type: ignore[override] + def update_reverse(attr, obj, old_val, new_val, undo_funcs) -> None: ... + +class Index: + entity: Incomplete + attrs: list[Incomplete] + is_pk: bool + is_unique: bool + def __init__(index, *attrs, **options) -> None: ... + +def composite_index(*attrs) -> None: ... +def composite_key(*attrs) -> None: ... + +class PrimaryKey(Required): + def __new__(cls, *args, **kwargs): ... + +class Collection(Attribute): + table: str | list[str] | tuple[str, ...] 
| None + wrapper_class: Incomplete + symmetric: bool + reverse_column: Incomplete + reverse_columns: Incomplete + nplus1_threshold: int + cached_load_sql: dict[int, Incomplete] + cached_add_m2m_sql: tuple[Incomplete, Incomplete] | None + cached_remove_m2m_sql: tuple[Incomplete, Incomplete] | None + cached_count_sql: tuple[Incomplete, Incomplete] | None + cached_empty_sql: tuple[Incomplete, Incomplete, Incomplete] | None + reverse_fk_name: Incomplete + def __init__(attr, py_type, *args, **kwargs) -> None: ... + def load(attr, obj) -> None: ... + def __get__(attr, obj, cls=None) -> None: ... + def __set__(attr, obj, val) -> None: ... # type: ignore[override] + def __delete__(attr, obj) -> None: ... + def prepare(attr, obj, val, fromdb: bool = False) -> None: ... + def set(attr, obj, val, fromdb: bool = False) -> None: ... + +class SetData(set[Incomplete]): + is_fully_loaded: bool + added: Incomplete + removed: Incomplete + absent: Incomplete + count: int | None + def __init__(setdata) -> None: ... + +def construct_batchload_criteria_list( + alias, columns, converters, batch_size, row_value_syntax, start: int = 0, from_seeds: bool = True +): ... + +class Set(Collection): + def validate(attr, val, obj=None, entity=None, from_db: bool = False): ... + def prefetch_load_all(attr, objects): ... + def load(attr, obj, items=None): ... + def construct_sql_m2m(attr, batch_size: int = 1, items_count: int = 0): ... + def copy(attr, obj): ... + def __get__(attr, obj, cls=None): ... + def __set__(attr, obj, new_items, undo_funcs=None) -> None: ... + def __delete__(attr, obj) -> None: ... + def reverse_add(attr, objects, item, undo_funcs) -> None: ... + def db_reverse_add(attr, objects, item) -> None: ... + def reverse_remove(attr, objects, item, undo_funcs) -> None: ... + def db_reverse_remove(attr, objects, item) -> None: ... + def get_m2m_columns(attr, is_reverse: bool = False): ... + def remove_m2m(attr, removed) -> None: ... + def add_m2m(attr, added) -> None: ... + def drop_table(attr, with_all_data: bool = False) -> None: ... + +def unpickle_setwrapper(obj, attrname, items): ... + +class SetIterator: + def __init__(self, wrapper) -> None: ... + def __iter__(self): ... + def next(self): ... + __next__ = next + +class SetInstance: + def __init__(wrapper, obj, attr) -> None: ... + def __reduce__(wrapper): ... + def copy(wrapper): ... + def __nonzero__(wrapper): ... + def is_empty(wrapper): ... + def __len__(wrapper) -> int: ... + def count(wrapper): ... + def __iter__(wrapper): ... + def __eq__(wrapper, other): ... + def __ne__(wrapper, other): ... + def __add__(wrapper, new_items): ... + def __sub__(wrapper, items): ... + def __contains__(wrapper, item) -> bool: ... + def create(wrapper, **kwargs): ... + def add(wrapper, new_items) -> None: ... + def __iadd__(wrapper, items): ... + def remove(wrapper, items) -> None: ... + def __isub__(wrapper, items): ... + def clear(wrapper) -> None: ... + def load(wrapper) -> None: ... + def select(wrapper, *args, **kwargs): ... + filter = select + def limit(wrapper, limit=None, offset=None): ... + def page(wrapper, pagenum, pagesize: int = 10): ... + def order_by(wrapper, *args): ... + def sort_by(wrapper, *args): ... + def random(wrapper, limit): ... + +def unpickle_multiset(obj, attrnames, items): ... + +class Multiset: + def __init__(multiset, obj, attrnames, items) -> None: ... + def __reduce__(multiset): ... + def distinct(multiset): ... + def __nonzero__(multiset): ... + def __len__(multiset) -> int: ... + def __iter__(multiset): ... 
+ def __eq__(multiset, other): ... + def __ne__(multiset, other): ... + def __contains__(multiset, item) -> bool: ... + +class EntityIter: + entity: Incomplete + def __init__(self, entity) -> None: ... + def next(self) -> None: ... + __next__ = next + +entity_id_counter: itertools.count[int] +new_instance_id_counter: itertools.count[int] +select_re: re.Pattern[str] +lambda_re: re.Pattern[str] + +class EntityMeta(type): + def __new__(meta, name, bases, cls_dict): ... + def __init__(entity, name, bases, cls_dict) -> None: ... + def __iter__(entity): ... + def __getitem__(entity, key): ... + def exists(entity, *args, **kwargs): ... + def get(entity, *args, **kwargs): ... + def get_for_update(entity, *args, **kwargs): ... + def get_by_sql(entity, sql, globals=None, locals=None): ... + def select(entity, *args, **kwargs): ... + def select_by_sql(entity, sql, globals=None, locals=None): ... + def select_random(entity, limit): ... + def describe(entity) -> str: ... + def drop_table(entity, with_all_data: bool = False) -> None: ... + +def populate_criteria_list( + criteria_list, columns, converters, operations, params_count: int = 0, table_alias=None, optimistic: bool = False +) -> int: ... + +statuses: set[str] +del_statuses: set[str] +created_or_deleted_statuses: set[str] +saved_statuses: set[str] + +def throw_object_was_deleted(obj: Entity) -> NoReturn: ... +def unpickle_entity(d): ... +def safe_repr(obj: Entity) -> str: ... +def make_proxy(obj: Entity) -> EntityProxy: ... + +class EntityProxy: + def __init__(self, obj: Entity) -> None: ... + def __getattr__(self, name: str): ... + def __setattr__(self, name: str, value) -> None: ... + def __eq__(self, other) -> bool: ... + def __ne__(self, other) -> bool: ... + +class Entity(metaclass=EntityMeta): + def __reduce__(obj): ... + def __init__(obj, *args, **kwargs) -> None: ... + def get_pk(obj): ... + def __lt__(entity, other): ... + def __le__(entity, other): ... + def __gt__(entity, other): ... + def __ge__(entity, other): ... + def load(obj, *attrs) -> None: ... + def delete(obj) -> None: ... + def set(obj, **kwargs) -> None: ... + def find_updated_attributes(obj): ... + def flush(obj) -> None: ... + def before_insert(obj) -> None: ... + def before_update(obj) -> None: ... + def before_delete(obj) -> None: ... + def after_insert(obj) -> None: ... + def after_update(obj) -> None: ... + def after_delete(obj) -> None: ... + def to_dict( + obj, only=None, exclude=None, with_collections: bool = False, with_lazy: bool = False, related_objects: bool = False + ): ... + def to_json(obj, include=(), exclude=(), converter=None, with_schema: bool = True, schema_hash=None): ... + +def string2ast(s: str) -> ast.Expr: ... +def get_globals_and_locals(args, kwargs, frame_depth, from_generator=False): ... +def make_query(args, frame_depth, left_join: bool = False) -> Query: ... +def select(*args): ... +def left_join(*args): ... +def get(*args): ... +def exists(*args): ... +def delete(*args): ... +def make_aggrfunc(std_func): ... + +count: Incomplete +sum: Incomplete +min: Incomplete +max: Incomplete +avg: Incomplete +group_concat: Incomplete +distinct: Incomplete + +def JOIN(expr: _T) -> _T: ... +def desc(expr): ... +def extract_vars(code_key, filter_num, extractors, globals, locals, cells=None): ... +def unpickle_query(query_result: _T) -> _T: ... + +class Query: + def __init__(query, code_key, tree, globals, locals, cells=None, left_join: bool = False) -> None: ... + def __reduce__(query): ... + def get_sql(query): ... + def prefetch(query, *args): ... 
+ def show(query, width=None, stream=None) -> None: ... + def get(query): ... + def first(query): ... + def without_distinct(query): ... + def distinct(query): ... + def exists(query): ... + def delete(query, bulk=None): ... + def __len__(query) -> int: ... + def __iter__(query): ... + def order_by(query, *args): ... + def sort_by(query, *args): ... + def filter(query, *args, **kwargs): ... + def where(query, *args, **kwargs): ... + def __getitem__(query, key): ... + def fetch(query, limit=None, offset=None): ... + def limit(query, limit=None, offset=None): ... + def page(query, pagenum, pagesize: int = 10): ... + def sum(query, distinct=None): ... + def avg(query, distinct=None): ... + def group_concat(query, sep=None, distinct=None): ... + def min(query): ... + def max(query): ... + def count(query, distinct=None): ... + def for_update(query, nowait: bool = False, skip_locked: bool = False): ... + def random(query, limit): ... + def to_json(query, include=(), exclude=(), converter=None, with_schema: bool = True, schema_hash=None): ... + +class QueryResultIterator: + def __init__(self, query_result) -> None: ... + def next(self): ... + __next__ = next + def __length_hint__(self) -> int: ... + +def make_query_result_method_error_stub(name: str, title: str | None = None) -> Callable[..., NoReturn]: ... + +class QueryResult: + def __init__(self, query, limit, offset, lazy) -> None: ... + def __iter__(self): ... + def __len__(self) -> int: ... + def __getitem__(self, key): ... + def __contains__(self, item) -> bool: ... + def index(self, item): ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __lt__(self, other): ... + def __le__(self, other): ... + def __gt__(self, other): ... + def __ge__(self, other): ... + def __reversed__(self): ... + def reverse(self) -> None: ... + def sort(self, *args, **kwargs) -> None: ... + def shuffle(self) -> None: ... + def show(self, width=None, stream=None): ... + def to_json(self, include=(), exclude=(), converter=None, with_schema: bool = True, schema_hash=None): ... + def __add__(self, other): ... + def __radd__(self, other): ... + def to_list(self): ... + __setitem__: Incomplete + __delitem__: Incomplete + __iadd__: Incomplete + __imul__: Incomplete + __mul__: Incomplete + __rmul__: Incomplete + append: Incomplete + clear: Incomplete + extend: Incomplete + insert: Incomplete + pop: Incomplete + remove: Incomplete + +def strcut(s: str, width: int) -> str: ... +def show(entity) -> None: ... + +special_functions: set[Incomplete] +const_functions: set[type] diff --git a/stubs/pony/pony/orm/dbapiprovider.pyi b/stubs/pony/pony/orm/dbapiprovider.pyi new file mode 100644 index 000000000000..ce0c0a5e3eff --- /dev/null +++ b/stubs/pony/pony/orm/dbapiprovider.pyi @@ -0,0 +1,216 @@ +import json +import re +import types +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import ClassVar + +from pony.utils import localbase + +class DBException(Exception): + def __init__(exc, original_exc, *args) -> None: ... + +class Warning(DBException): ... +class Error(DBException): ... +class InterfaceError(Error): ... +class DatabaseError(Error): ... +class DataError(DatabaseError): ... +class OperationalError(DatabaseError): ... +class IntegrityError(DatabaseError): ... +class InternalError(DatabaseError): ... +class ProgrammingError(DatabaseError): ... +class NotSupportedError(DatabaseError): ... + +def wrap_dbapi_exceptions(func, provider, *args, **kwargs): ... +def unexpected_args(attr, args) -> None: ... 
+ +version_re: re.Pattern[str] + +def get_version_tuple(s: str): ... + +class DBAPIProvider: + paramstyle: ClassVar[str] + quote_char: ClassVar[str] + max_params_count: ClassVar[int] + max_name_len: ClassVar[int] + table_if_not_exists_syntax: ClassVar[bool] + index_if_not_exists_syntax: ClassVar[bool] + max_time_precision: ClassVar[int] + default_time_precision: ClassVar[int] + uint64_support: ClassVar[bool] + varchar_default_max_len: ClassVar[int | None] + dialect: ClassVar[str | None] + dbapi_module: ClassVar[types.ModuleType | None] + dbschema_cls: ClassVar[type | None] + translator_cls: ClassVar[type | None] + sqlbuilder_cls: ClassVar[type | None] + array_converter_cls: ClassVar[type | None] + name_before_table: ClassVar[str] + default_schema_name: ClassVar[str | None] + fk_types: ClassVar[dict[str, str]] + converter_classes: Incomplete + def __init__(provider, _database, *args, **kwargs) -> None: ... + def inspect_connection(provider, connection) -> None: ... + def normalize_name(provider, name): ... + def get_default_entity_table_name(provider, entity): ... + def get_default_m2m_table_name(provider, attr, reverse): ... + def get_default_column_names(provider, attr, reverse_pk_columns=None): ... + def get_default_m2m_column_names(provider, entity): ... + def get_default_index_name( + provider, table_name, column_names, is_pk: bool = False, is_unique: bool = False, m2m: bool = False + ): ... + def get_default_fk_name(provider, child_table_name, parent_table_name, child_column_names): ... + def split_table_name(provider, table_name): ... + def base_name(provider, name): ... + def quote_name(provider, name: str | Iterable[str]) -> str: ... + def format_table_name(provider, name): ... + def normalize_vars(provider, vars, vartypes) -> None: ... + def ast2sql(provider, ast): ... + def should_reconnect(provider, exc): ... + def connect(provider): ... + def set_transaction_mode(provider, connection, cache) -> None: ... + def commit(provider, connection, cache=None) -> None: ... + def rollback(provider, connection, cache=None) -> None: ... + def release(provider, connection, cache=None) -> None: ... + def drop(provider, connection, cache=None) -> None: ... + def disconnect(provider) -> None: ... + def execute(provider, cursor, sql, arguments=None, returning_id: bool = False): ... + def get_converter_by_py_type(provider, py_type): ... + def get_converter_by_attr(provider, attr): ... + def get_pool(provider, *args, **kwargs): ... + def table_exists(provider, connection, table_name, case_sensitive: bool = True) -> None: ... + def index_exists(provider, connection, table_name, index_name, case_sensitive: bool = True) -> None: ... + def fk_exists(provider, connection, table_name, fk_name, case_sensitive: bool = True) -> None: ... + def table_has_data(provider, connection, table_name): ... + def disable_fk_checks(provider, connection) -> None: ... + def enable_fk_checks(provider, connection, prev_state) -> None: ... + def drop_table(provider, connection, table_name) -> None: ... + +class Pool(localbase): + forked_connections: list[tuple[Incomplete, int | None]] + dbapi_module: types.ModuleType + args: tuple[Incomplete, ...] + kwargs: dict[str, Incomplete] + con: Incomplete + pid: int | None + def __init__(pool, dbapi_module: types.ModuleType, *args, **kwargs) -> None: ... + def connect(pool) -> tuple[Incomplete, bool]: ... + def release(pool, con) -> None: ... + def drop(pool, con) -> None: ... + def disconnect(pool) -> None: ... 
+ +class Converter: + EQ: str + NE: str + optimistic: bool + def __deepcopy__(converter, memo): ... + def __init__(converter, provider, py_type, attr=None) -> None: ... + def init(converter, kwargs) -> None: ... + def validate(converter, val, obj=None): ... + def py2sql(converter, val): ... + def sql2py(converter, val): ... + def val2dbval(self, val, obj=None): ... + def dbval2val(self, dbval, obj=None): ... + def dbvals_equal(self, x, y): ... + def get_sql_type(converter, attr=None): ... + def get_fk_type(converter, sql_type): ... + +class NoneConverter(Converter): + def __init__(converter, provider, py_type, attr=None) -> None: ... + def get_sql_type(converter, attr=None) -> None: ... + def get_fk_type(converter, sql_type) -> None: ... + +class BoolConverter(Converter): + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + def sql_type(converter): ... + +class StrConverter(Converter): + def __init__(converter, provider, py_type, attr=None) -> None: ... + def init(converter, kwargs) -> None: ... + def validate(converter, val, obj=None): ... + def sql_type(converter): ... + +class IntConverter(Converter): + signed_types: Incomplete + unsigned_types: Incomplete + def init(converter, kwargs) -> None: ... + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + def sql_type(converter): ... + +class RealConverter(Converter): + EQ: str + NE: str + default_tolerance: float + optimistic: bool + def init(converter, kwargs) -> None: ... + def validate(converter, val, obj=None): ... + def dbvals_equal(converter, x, y): ... + def sql2py(converter, val): ... + def sql_type(converter): ... + +class DecimalConverter(Converter): + def __init__(converter, provider, py_type, attr=None) -> None: ... + def init(converter, kwargs) -> None: ... + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + def sql_type(converter): ... + +class BlobConverter(Converter): + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + def sql_type(converter): ... + +class DateConverter(Converter): + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + def sql_type(converter): ... + +class ConverterWithMicroseconds(Converter): + def __init__(converter, provider, py_type, attr=None) -> None: ... + def init(converter, kwargs) -> None: ... + def round_microseconds_to_precision(converter, microseconds, precision): ... + def sql_type(converter): ... + +class TimeConverter(ConverterWithMicroseconds): + sql_type_name: ClassVar[str] + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + +class TimedeltaConverter(ConverterWithMicroseconds): + sql_type_name: ClassVar[str] + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + +class DatetimeConverter(ConverterWithMicroseconds): + sql_type_name: ClassVar[str] + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + +class UuidConverter(Converter): + def __init__(converter, provider, py_type, attr=None) -> None: ... + def validate(converter, val, obj=None): ... + def py2sql(converter, val): ... + sql2py = validate + def sql_type(converter): ... + +class JsonConverter(Converter): + json_kwargs: Incomplete + + class JsonEncoder(json.JSONEncoder): + def default(converter, obj): ... + + def validate(converter, val, obj=None): ... + def val2dbval(converter, val, obj=None): ... + def dbval2val(converter, dbval, obj=None): ... + def dbvals_equal(converter, x, y): ... 
+ def sql_type(converter): ... + +class ArrayConverter(Converter): + array_types: Incomplete + def __init__(converter, provider, py_type, attr=None) -> None: ... + def validate(converter, val, obj=None): ... + def dbval2val(converter, dbval, obj=None): ... + def val2dbval(converter, val, obj=None): ... + def sql_type(converter): ... diff --git a/stubs/pony/pony/orm/dbproviders/__init__.pyi b/stubs/pony/pony/orm/dbproviders/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/pony/pony/orm/dbproviders/cockroach.pyi b/stubs/pony/pony/orm/dbproviders/cockroach.pyi new file mode 100644 index 000000000000..2a3cfa5c7a2d --- /dev/null +++ b/stubs/pony/pony/orm/dbproviders/cockroach.pyi @@ -0,0 +1,49 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from pony.orm import dbapiprovider +from pony.orm.dbproviders.postgres import ( + PGArrayConverter, + PGBlobConverter, + PGColumn, + PGIntConverter, + PGProvider, + PGSchema, + PGSQLBuilder, + PGTimedeltaConverter, + PGTranslator, +) + +NoneType: type[None] + +class CRColumn(PGColumn): + auto_template: ClassVar[str] + +class CRSchema(PGSchema): + column_class: ClassVar[type[CRColumn]] + +class CRTranslator(PGTranslator): ... +class CRSQLBuilder(PGSQLBuilder): ... + +class CRIntConverter(PGIntConverter): + signed_types: Incomplete + unsigned_types: Incomplete + +class CRBlobConverter(PGBlobConverter): + def sql_type(converter): ... + +class CRTimedeltaConverter(PGTimedeltaConverter): ... + +class PGUuidConverter(dbapiprovider.UuidConverter): + def py2sql(converter, val): ... + +class CRArrayConverter(PGArrayConverter): + array_types: Incomplete + +class CRProvider(PGProvider): + dbschema_cls: ClassVar[type[CRSchema]] + translator_cls: ClassVar[type[CRTranslator]] + sqlbuilder_cls: ClassVar[type[CRSQLBuilder]] + array_converter_cls: ClassVar[type[CRArrayConverter]] + +provider_cls = CRProvider diff --git a/stubs/pony/pony/orm/dbproviders/mysql.pyi b/stubs/pony/pony/orm/dbproviders/mysql.pyi new file mode 100644 index 000000000000..61b2e4d9cfd3 --- /dev/null +++ b/stubs/pony/pony/orm/dbproviders/mysql.pyi @@ -0,0 +1,101 @@ +import types +from _typeshed import Incomplete +from typing import ClassVar + +from pony.orm import dbapiprovider, dbschema +from pony.orm.dbapiprovider import DBAPIProvider +from pony.orm.sqlbuilding import SQLBuilder, Value +from pony.orm.sqltranslation import SQLTranslator + +NoneType: type[None] +mysql_module_name: str + +class MySQLColumn(dbschema.Column): + auto_template: ClassVar[str] + +class MySQLSchema(dbschema.DBSchema): + dialect: ClassVar[str] + column_class: ClassVar[type[MySQLColumn]] + +class MySQLTranslator(SQLTranslator): + dialect: ClassVar[str] + +class MySQLValue(Value): ... + +class MySQLBuilder(SQLBuilder): + dialect: ClassVar[str] + value_class: ClassVar[type[MySQLValue]] + def CONCAT(builder, *args): ... + def TRIM(builder, expr, chars=None): ... + def LTRIM(builder, expr, chars=None): ... + def RTRIM(builder, expr, chars=None): ... + def TO_INT(builder, expr): ... + def TO_REAL(builder, expr): ... + def TO_STR(builder, expr): ... + def YEAR(builder, expr): ... + def MONTH(builder, expr): ... + def DAY(builder, expr): ... + def HOUR(builder, expr): ... + def MINUTE(builder, expr): ... + def SECOND(builder, expr): ... + def DATE_ADD(builder, expr, delta): ... + def DATE_SUB(builder, expr, delta): ... + def DATE_DIFF(builder, expr1, expr2): ... + def DATETIME_ADD(builder, expr, delta): ... + def DATETIME_SUB(builder, expr, delta): ... 
+ def DATETIME_DIFF(builder, expr1, expr2): ... + def JSON_QUERY(builder, expr, path): ... + def JSON_VALUE(builder, expr, path, type): ... + def JSON_NONZERO(builder, expr): ... + def JSON_ARRAY_LENGTH(builder, value): ... + def JSON_EQ(builder, left, right): ... + def JSON_NE(builder, left, right): ... + def JSON_CONTAINS(builder, expr, path, key): ... + @classmethod + def wrap_param_to_json_array(cls, values): ... + def JSON_PARAM(builder, expr): ... + +class MySQLStrConverter(dbapiprovider.StrConverter): + def sql_type(converter): ... + +class MySQLRealConverter(dbapiprovider.RealConverter): + def sql_type(converter): ... + +class MySQLBlobConverter(dbapiprovider.BlobConverter): + def sql_type(converter): ... + +class MySQLTimeConverter(dbapiprovider.TimeConverter): + def sql2py(converter, val): ... + +class MySQLTimedeltaConverter(dbapiprovider.TimedeltaConverter): ... + +class MySQLUuidConverter(dbapiprovider.UuidConverter): + def sql_type(converter): ... + +class MySQLJsonConverter(dbapiprovider.JsonConverter): + EQ: str + NE: str + def init(self, kwargs) -> None: ... + +class MySQLProvider(DBAPIProvider): + dialect: ClassVar[str] + varchar_default_max_len: ClassVar[int] + dbapi_module: ClassVar[types.ModuleType] + dbschema_cls: ClassVar[type[MySQLSchema]] + translator_cls: ClassVar[type[MySQLTranslator]] + sqlbuilder_cls: ClassVar[type[MySQLBuilder]] + fk_types: ClassVar[dict[str, str]] + converter_classes: Incomplete + def normalize_name(provider, name): ... + def inspect_connection(provider, connection) -> None: ... + def should_reconnect(provider, exc): ... + def get_pool(provider, *args, **kwargs): ... + def set_transaction_mode(provider, connection, cache) -> None: ... + def release(provider, connection, cache=None) -> None: ... + def table_exists(provider, connection, table_name, case_sensitive: bool = True): ... + def index_exists(provider, connection, table_name, index_name, case_sensitive: bool = True): ... + def fk_exists(provider, connection, table_name, fk_name, case_sensitive: bool = True): ... + +provider_cls = MySQLProvider + +def str2datetime(s): ... diff --git a/stubs/pony/pony/orm/dbproviders/oracle.pyi b/stubs/pony/pony/orm/dbproviders/oracle.pyi new file mode 100644 index 000000000000..220e58f4099d --- /dev/null +++ b/stubs/pony/pony/orm/dbproviders/oracle.pyi @@ -0,0 +1,159 @@ +import re +from _typeshed import Incomplete +from typing import ClassVar + +from pony.orm import dbapiprovider, sqltranslation +from pony.orm.dbapiprovider import DBAPIProvider +from pony.orm.dbschema import Column, DBObject, DBSchema, Table +from pony.orm.sqlbuilding import SQLBuilder + +NoneType: type[None] + +class OraTable(Table): + def get_objects_to_create(table, created_tables=None): ... + +class OraSequence(DBObject): + typename: ClassVar[str] + def __init__(sequence, table, name=None) -> None: ... + def exists(sequence, provider, connection, case_sensitive: bool = True): ... + def get_create_command(sequence): ... + +trigger_template: str + +class OraTrigger(DBObject): + typename: ClassVar[str] + def __init__(trigger, table, column, sequence) -> None: ... + def exists(trigger, provider, connection, case_sensitive: bool = True): ... + def get_create_command(trigger): ... 
+ +class OraColumn(Column): + auto_template: ClassVar[None] # type: ignore[assignment] + +class OraSchema(DBSchema): + dialect: ClassVar[str] + table_class: ClassVar[type[OraTable]] + column_class: ClassVar[type[OraColumn]] + +class OraNoneMonad(sqltranslation.NoneMonad): + def __init__(monad, value=None) -> None: ... + +class OraConstMonad(sqltranslation.ConstMonad): + @staticmethod + def new(value): ... + +class OraTranslator(sqltranslation.SQLTranslator): + dialect: ClassVar[str] + NoneMonad = OraNoneMonad + ConstMonad = OraConstMonad + +class OraBuilder(SQLBuilder): + dialect: ClassVar[str] + def INSERT(builder, table_name, columns, values, returning=None): ... + def SELECT_FOR_UPDATE(builder, nowait, skip_locked, *sections): ... + def SELECT(builder, *sections): ... + def ROWID(builder, *expr_list): ... + def LIMIT(builder, limit, offset=None) -> None: ... + def TO_REAL(builder, expr): ... + def TO_STR(builder, expr): ... + def DATE(builder, expr): ... + def RANDOM(builder): ... + def MOD(builder, a, b): ... + def DATE_ADD(builder, expr, delta): ... + def DATE_SUB(builder, expr, delta): ... + def DATE_DIFF(builder, expr1, expr2): ... + def DATETIME_ADD(builder, expr, delta): ... + def DATETIME_SUB(builder, expr, delta): ... + def DATETIME_DIFF(builder, expr1, expr2): ... + def build_json_path(builder, path): ... + def JSON_QUERY(builder, expr, path): ... + json_value_type_mapping: Incomplete + def JSON_VALUE(builder, expr, path, type): ... + def JSON_NONZERO(builder, expr): ... + def JSON_CONTAINS(builder, expr, path, key): ... + def JSON_ARRAY_LENGTH(builder, value) -> None: ... + def GROUP_CONCAT(builder, distinct, expr, sep=None): ... + +json_item_re: re.Pattern[str] + +class OraBoolConverter(dbapiprovider.BoolConverter): + def py2sql(converter, val): ... + def sql2py(converter, val): ... + def sql_type(converter): ... + +class OraStrConverter(dbapiprovider.StrConverter): + def validate(converter, val, obj=None): ... + def sql2py(converter, val): ... + def sql_type(converter): ... + +class OraIntConverter(dbapiprovider.IntConverter): + signed_types: Incomplete + unsigned_types: Incomplete + def init(self, kwargs) -> None: ... + +class OraRealConverter(dbapiprovider.RealConverter): + def sql_type(converter): ... + +class OraDecimalConverter(dbapiprovider.DecimalConverter): + def sql_type(converter): ... + +class OraBlobConverter(dbapiprovider.BlobConverter): + def sql2py(converter, val): ... + +class OraDateConverter(dbapiprovider.DateConverter): + def sql2py(converter, val): ... + +class OraTimeConverter(dbapiprovider.TimeConverter): + def __init__(converter, provider, py_type, attr=None) -> None: ... + def sql2py(converter, val): ... + def py2sql(converter, val): ... + +class OraTimedeltaConverter(dbapiprovider.TimedeltaConverter): + def __init__(converter, provider, py_type, attr=None) -> None: ... + +class OraDatetimeConverter(dbapiprovider.DatetimeConverter): ... + +class OraUuidConverter(dbapiprovider.UuidConverter): + def sql_type(converter): ... + +class OraJsonConverter(dbapiprovider.JsonConverter): + json_kwargs: Incomplete + optimistic: bool + def sql2py(converter, dbval): ... + def sql_type(converter): ... + +class OraProvider(DBAPIProvider): + dialect: ClassVar[str] + varchar_default_max_len: ClassVar[int] + dbschema_cls: ClassVar[type[OraSchema]] + translator_cls: ClassVar[type[OraTranslator]] + sqlbuilder_cls: ClassVar[type[OraBuilder]] + converter_classes: Incomplete + def inspect_connection(provider, connection) -> None: ... 
+ def should_reconnect(provider, exc): ... + def normalize_name(provider, name): ... + def normalize_vars(provider, vars, vartypes) -> None: ... + def set_transaction_mode(provider, connection, cache) -> None: ... + def execute(provider, cursor, sql, arguments=None, returning_id: bool = False): ... + def get_pool(provider, *args, **kwargs): ... + def table_exists(provider, connection, table_name, case_sensitive: bool = True): ... + def index_exists(provider, connection, table_name, index_name, case_sensitive: bool = True): ... + def fk_exists(provider, connection, table_name, fk_name, case_sensitive: bool = True): ... + def table_has_data(provider, connection, table_name): ... + def drop_table(provider, connection, table_name) -> None: ... + +provider_cls = OraProvider + +def to_int_or_decimal(val): ... +def to_decimal(val): ... +def output_type_handler(cursor, name, defaultType, size, precision, scale): ... + +class OraPool: + forked_pools: Incomplete + def __init__(pool, **kwargs) -> None: ... + def connect(pool): ... + def release(pool, con) -> None: ... + def drop(pool, con) -> None: ... + def disconnect(pool) -> None: ... + +def get_inputsize(arg): ... +def set_input_sizes(cursor, arguments) -> None: ... diff --git a/stubs/pony/pony/orm/dbproviders/postgres.pyi b/stubs/pony/pony/orm/dbproviders/postgres.pyi new file mode 100644 index 000000000000..f3aef861eebe --- /dev/null +++ b/stubs/pony/pony/orm/dbproviders/postgres.pyi @@ -0,0 +1,105 @@ +import types +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import ClassVar + +from pony.orm import dbapiprovider, dbschema +from pony.orm.dbapiprovider import DBAPIProvider, Pool +from pony.orm.sqlbuilding import SQLBuilder, Value +from pony.orm.sqltranslation import SQLTranslator +from psycopg2._psycopg import _Connection + +NoneType: type[None] + +class PGColumn(dbschema.Column): + auto_template: ClassVar[str] + +class PGSchema(dbschema.DBSchema): + dialect: ClassVar[str] + column_class: ClassVar[type[PGColumn]] + +class PGTranslator(SQLTranslator): + dialect: ClassVar[str] + +class PGValue(Value): ... + +class PGSQLBuilder(SQLBuilder): + dialect: ClassVar[str] + value_class: ClassVar[type[PGValue]] + def INSERT(builder, table_name, columns, values, returning=None): ... + def TO_INT(builder, expr): ... + def TO_STR(builder, expr): ... + def TO_REAL(builder, expr): ... + def DATE(builder, expr): ... + def RANDOM(builder): ... + def DATE_ADD(builder, expr, delta): ... + def DATE_SUB(builder, expr, delta): ... + def DATE_DIFF(builder, expr1, expr2): ... + def DATETIME_ADD(builder, expr, delta): ... + def DATETIME_SUB(builder, expr, delta): ... + def DATETIME_DIFF(builder, expr1, expr2): ... + def eval_json_path(builder, values: Iterable[int | str]) -> str: ... # type: ignore[override] + def JSON_QUERY(builder, expr, path): ... + json_value_type_mapping: dict[type, str] + def JSON_VALUE(builder, expr, path, type): ... + def JSON_NONZERO(builder, expr): ... + def JSON_CONCAT(builder, left, right): ... + def JSON_CONTAINS(builder, expr, path, key): ... + def JSON_ARRAY_LENGTH(builder, value): ... + def GROUP_CONCAT(builder, distinct, expr, sep=None): ... + def ARRAY_INDEX(builder, col, index): ... + def ARRAY_CONTAINS(builder, key, not_in, col): ... + def ARRAY_SUBSET(builder, array1, not_in, array2): ... + def ARRAY_LENGTH(builder, array): ... + def ARRAY_SLICE(builder, array, start, stop): ... + def MAKE_ARRAY(builder, *items): ... 
+ +class PGIntConverter(dbapiprovider.IntConverter): + signed_types: Incomplete + unsigned_types: Incomplete + +class PGRealConverter(dbapiprovider.RealConverter): + def sql_type(converter): ... + +class PGBlobConverter(dbapiprovider.BlobConverter): + def sql_type(converter): ... + +class PGTimedeltaConverter(dbapiprovider.TimedeltaConverter): ... +class PGDatetimeConverter(dbapiprovider.DatetimeConverter): ... + +class PGUuidConverter(dbapiprovider.UuidConverter): + def py2sql(converter, val): ... + +class PGJsonConverter(dbapiprovider.JsonConverter): + def sql_type(self): ... + +class PGArrayConverter(dbapiprovider.ArrayConverter): + array_types: dict[type, tuple[str, type]] + +class PGPool(Pool): + def release(pool, con) -> None: ... + +ADMIN_SHUTDOWN: str + +class PGProvider(DBAPIProvider): + dialect: ClassVar[str] + dbapi_module: ClassVar[types.ModuleType] + dbschema_cls: ClassVar[type[PGSchema]] + translator_cls: ClassVar[type[PGTranslator]] + sqlbuilder_cls: ClassVar[type[PGSQLBuilder]] + array_converter_cls: ClassVar[type[PGArrayConverter]] + default_schema_name: ClassVar[str] + fk_types: ClassVar[dict[str, str]] + converter_classes: list[tuple[type | tuple[type], type]] + def normalize_name(provider, name: str) -> str: ... + def inspect_connection(provider, connection: _Connection) -> None: ... + def should_reconnect(provider, exc: BaseException | None) -> bool: ... + def get_pool(provider, *args, **kwargs) -> PGPool: ... + def set_transaction_mode(provider, connection: _Connection, cache) -> None: ... + def execute(provider, cursor, sql, arguments=None, returning_id: bool = False): ... + def table_exists(provider, connection: _Connection, table_name: str, case_sensitive: bool = True): ... + def index_exists(provider, connection: _Connection, table_name: str, index_name, case_sensitive: bool = True): ... + def fk_exists(provider, connection: _Connection, table_name: str, fk_name, case_sensitive: bool = True): ... + def drop_table(provider, connection: _Connection, table_name: str) -> None: ... + +provider_cls = PGProvider diff --git a/stubs/pony/pony/orm/dbproviders/sqlite.pyi b/stubs/pony/pony/orm/dbproviders/sqlite.pyi new file mode 100644 index 000000000000..0bf153cd8d70 --- /dev/null +++ b/stubs/pony/pony/orm/dbproviders/sqlite.pyi @@ -0,0 +1,219 @@ +import re +import sys +import types +from _typeshed import Incomplete, StrOrBytesPath +from sqlite3 import Connection as _Connection +from typing import Any, ClassVar, overload + +from pony.orm import dbapiprovider, dbschema +from pony.orm.dbapiprovider import DBAPIProvider, Pool +from pony.orm.sqlbuilding import SQLBuilder, Value +from pony.orm.sqltranslation import SQLTranslator +from pony.utils import localbase + +class SqliteExtensionUnavailable(Exception): ... + +NoneType: type[None] + +class SQLiteForeignKey(dbschema.ForeignKey): + def get_create_command(foreign_key) -> None: ... + +class SQLiteSchema(dbschema.DBSchema): + dialect: ClassVar[str] + fk_class: ClassVar[type[SQLiteForeignKey]] + +def make_overriden_string_func(sqlop): ... + +class SQLiteTranslator(SQLTranslator): + dialect: ClassVar[str] + sqlite_version: tuple[int, int, int] + StringMixin_UPPER: Incomplete + StringMixin_LOWER: Incomplete + +class SQLiteValue(Value): ... + +class SQLiteBuilder(SQLBuilder): + dialect: ClassVar[str] + least_func_name: ClassVar[str] + greatest_func_name: ClassVar[str] + value_class: ClassVar[type[SQLiteValue]] + def __init__(builder, provider, ast) -> None: ... + def SELECT_FOR_UPDATE(builder, nowait, skip_locked, *sections): ... 
+ def INSERT(builder, table_name, columns, values, returning=None): ... + def STRING_SLICE(builder, expr, start, stop): ... + def IN(builder, expr1, x): ... + def NOT_IN(builder, expr1, x): ... + def TODAY(builder): ... + def NOW(builder): ... + def YEAR(builder, expr): ... + def MONTH(builder, expr): ... + def DAY(builder, expr): ... + def HOUR(builder, expr): ... + def MINUTE(builder, expr): ... + def SECOND(builder, expr): ... + def datetime_add(builder, funcname, expr, td): ... + def DATE_ADD(builder, expr, delta): ... + def DATE_SUB(builder, expr, delta): ... + def DATE_DIFF(builder, expr1, expr2): ... + def DATETIME_ADD(builder, expr, delta): ... + def DATETIME_SUB(builder, expr, delta): ... + def DATETIME_DIFF(builder, expr1, expr2): ... + def RANDOM(builder): ... + PY_UPPER: Incomplete + PY_LOWER: Incomplete + def FLOAT_EQ(builder, a, b): ... + def FLOAT_NE(builder, a, b): ... + def JSON_QUERY(builder, expr, path): ... + json_value_type_mapping: Incomplete + def JSON_VALUE(builder, expr, path, type): ... + def JSON_NONZERO(builder, expr): ... + def JSON_ARRAY_LENGTH(builder, value): ... + def JSON_CONTAINS(builder, expr, path, key): ... + def ARRAY_INDEX(builder, col, index): ... + def ARRAY_CONTAINS(builder, key, not_in, col): ... + def ARRAY_SUBSET(builder, array1, not_in, array2): ... + def ARRAY_LENGTH(builder, array): ... + def ARRAY_SLICE(builder, array, start, stop): ... + def MAKE_ARRAY(builder, *items): ... + +class SQLiteIntConverter(dbapiprovider.IntConverter): + def sql_type(converter): ... + +class SQLiteDecimalConverter(dbapiprovider.DecimalConverter): + inf: Incomplete + neg_inf: Incomplete + NaN: Incomplete + def sql2py(converter, val): ... + def py2sql(converter, val): ... + +class SQLiteDateConverter(dbapiprovider.DateConverter): + def sql2py(converter, val): ... + def py2sql(converter, val): ... + +class SQLiteTimeConverter(dbapiprovider.TimeConverter): + def sql2py(converter, val): ... + def py2sql(converter, val): ... + +class SQLiteTimedeltaConverter(dbapiprovider.TimedeltaConverter): + def sql2py(converter, val): ... + def py2sql(converter, val): ... + +class SQLiteDatetimeConverter(dbapiprovider.DatetimeConverter): + def sql2py(converter, val): ... + def py2sql(converter, val): ... + +class SQLiteJsonConverter(dbapiprovider.JsonConverter): + json_kwargs: Incomplete + +def dumps(items): ... + +class SQLiteArrayConverter(dbapiprovider.ArrayConverter): + array_types: Incomplete + def dbval2val(converter, dbval, obj=None): ... + def val2dbval(converter, val, obj=None): ... + +class LocalExceptions(localbase): + exc_info: Incomplete + keep_traceback: bool + def __init__(self) -> None: ... + +local_exceptions: LocalExceptions + +def keep_exception(func): ... + +class SQLiteProvider(DBAPIProvider): + dialect: ClassVar[str] + local_exceptions: LocalExceptions + dbapi_module: ClassVar[types.ModuleType] + dbschema_cls: ClassVar[type[SQLiteSchema]] + translator_cls: ClassVar[type[SQLiteTranslator]] + sqlbuilder_cls: ClassVar[type[SQLiteBuilder]] + array_converter_cls: ClassVar[type[SQLiteArrayConverter]] + server_version: tuple[int, int, int] + converter_classes: Incomplete + def __init__(provider, database, filename, **kwargs) -> None: ... + def inspect_connection(provider, conn) -> None: ... + def restore_exception(provider) -> None: ... + def acquire_lock(provider) -> None: ... + def release_lock(provider) -> None: ... + def set_transaction_mode(provider, connection: _Connection, cache) -> None: ... 
+ def commit(provider, connection: _Connection, cache=None) -> None: ... + def rollback(provider, connection: _Connection, cache=None) -> None: ... + def drop(provider, connection: _Connection, cache=None) -> None: ... + def release(provider, connection: _Connection, cache=None) -> None: ... + def get_pool(provider, is_shared_memory_db, filename, create_db: bool = False, **kwargs): ... + def table_exists(provider, connection: _Connection, table_name: str, case_sensitive: bool = True): ... + def index_exists(provider, connection: _Connection, table_name: str, index_name, case_sensitive: bool = True): ... + def fk_exists(provider, connection: _Connection, table_name: str, fk_name) -> None: ... # type: ignore[override] + def check_json1(provider, connection: _Connection) -> bool: ... + +provider_cls = SQLiteProvider + +def make_string_function(name, base_func): ... + +py_upper: Incomplete +py_lower: Incomplete + +@overload +def py_json_unwrap(value: str) -> str | None: ... +@overload +def py_json_unwrap(value: Any) -> None: ... + +path_cache: Incomplete +json_path_re: re.Pattern[str] + +def py_json_extract(expr, *paths): ... +def py_json_query(expr, path, with_wrapper): ... +def py_json_value(expr, path): ... +def py_json_contains(expr, path, key): ... +def py_json_nonzero(expr, path): ... +def py_json_array_length(expr, path=None): ... +def wrap_array_func(func): ... +def py_array_index(array, index): ... +def py_array_contains(array, item): ... +def py_array_subset(array, items): ... +def py_array_length(array): ... +def py_array_slice(array, start, stop): ... +def py_make_array(*items): ... +@overload +def py_string_slice(s: None, start: str | int | None, end: str | int | None) -> None: ... +@overload +def py_string_slice(s: str, start: str | int | None, end: str | int | None) -> str: ... + +class SQLitePool(Pool): + is_shared_memory_db: bool | None + filename: StrOrBytesPath + create_db: bool | None + kwargs: dict[str, Incomplete] + if sys.version_info >= (3, 12): + def __init__( + pool, + is_shared_memory_db: bool | None, + filename: StrOrBytesPath, + create_db: bool | None, + *, + timeout: float = 5.0, + detect_types: int = 0, + check_same_thread: bool = True, + factory: type[_Connection], + cached_statements: int = 128, + uri: bool = False, + autocommit: bool = ..., + ) -> None: ... + else: + def __init__( + pool, + is_shared_memory_db: bool | None, + filename: StrOrBytesPath, + create_db: bool | None, + *, + timeout: float = 5.0, + detect_types: int = 0, + check_same_thread: bool = True, + factory: type[_Connection], + cached_statements: int = 128, + uri: bool = False, + ) -> None: ... + + def disconnect(pool) -> None: ... + def drop(pool, con: _Connection) -> None: ... 
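The SQLitePool signature above mirrors the keyword arguments of sqlite3.connect (timeout, detect_types, check_same_thread, factory, cached_statements, uri, and autocommit on Python 3.12+). As a minimal usage sketch of the provider these stubs describe, assuming only the public pony.orm API and a hypothetical Note entity:

from pony.orm import Database, Required, db_session, select

db = Database()

class Note(db.Entity):  # hypothetical entity, for illustration only
    text = Required(str)

# ":memory:" is the in-memory case handled by is_shared_memory_db above;
# a file path plus create_db=True would go through the same SQLitePool.
db.bind(provider="sqlite", filename=":memory:")
db.generate_mapping(create_tables=True)

with db_session:
    Note(text="hello")
    print(select(n.text for n in Note)[:])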
diff --git a/stubs/pony/pony/orm/dbschema.pyi b/stubs/pony/pony/orm/dbschema.pyi new file mode 100644 index 000000000000..686ca5f2dc7e --- /dev/null +++ b/stubs/pony/pony/orm/dbschema.pyi @@ -0,0 +1,86 @@ +from _typeshed import Incomplete +from typing import ClassVar + +from pony.orm.dbapiprovider import DBAPIProvider + +class DBSchema: + dialect: ClassVar[str | None] + inline_fk_syntax: ClassVar[bool] + named_foreign_keys: ClassVar[bool] + table_class: ClassVar[type[Table]] + column_class: ClassVar[type[Column]] + index_class: ClassVar[type[DBIndex]] + fk_class: ClassVar[type[ForeignKey]] + provider: DBAPIProvider + tables: dict[Incomplete, Incomplete] + constraints: dict[Incomplete, Incomplete] + indent: str + command_separator: str + uppercase: bool + names: dict[Incomplete, Incomplete] + def __init__(schema, provider: DBAPIProvider, uppercase: bool = True) -> None: ... + def column_list(schema, columns): ... + def case(schema, s: str) -> str: ... + def add_table(schema, table_name, entity=None): ... + def order_tables_to_create(schema): ... + def generate_create_script(schema): ... + def create_tables(schema, provider: DBAPIProvider, connection) -> None: ... + def check_tables(schema, provider: DBAPIProvider, connection) -> None: ... + +class DBObject: + def create(table, provider, connection) -> None: ... + +class Table(DBObject): + typename: ClassVar[str] + def __init__(table, name, schema, entity=None) -> None: ... + def add_entity(table, entity) -> None: ... + def exists(table, provider: DBAPIProvider, connection, case_sensitive: bool = True): ... + def get_create_command(table): ... + def format_option(table, name, value): ... + def get_objects_to_create(table, created_tables: set[Table] | None = None) -> list[Table]: ... + def add_column(table, column_name, sql_type, converter, is_not_null: bool | None = None, sql_default=None): ... + def add_index(table, index_name, columns, is_pk: bool = False, is_unique=None, m2m: bool = False): ... + def add_foreign_key( + table, + fk_name, + child_columns, + parent_table, + parent_columns, + index_name=None, + on_delete: bool = False, + interleave: bool = False, + ): ... + +class Column: + auto_template: ClassVar[str] + def __init__(column, name, table, sql_type, converter, is_not_null: bool | None = None, sql_default=None) -> None: ... + def get_sql(column) -> str: ... + +class Constraint(DBObject): + schema: DBSchema + name: str | None + def __init__(constraint, name: str | None, schema: DBSchema) -> None: ... + +class DBIndex(Constraint): + typename: ClassVar[str] + def __init__(index, name: str | None, table, columns, is_pk: bool = False, is_unique=None) -> None: ... + def exists(index, provider, connection, case_sensitive: bool = True): ... + def get_sql(index) -> str: ... + def get_create_command(index): ... + +class ForeignKey(Constraint): + typename: ClassVar[str] + def __init__( + foreign_key, + name, + child_table, + child_columns, + parent_table, + parent_columns, + index_name, + on_delete, + interleave: bool = False, + ) -> None: ... + def exists(foreign_key, provider, connection, case_sensitive: bool = True): ... + def get_sql(foreign_key) -> str: ... + def get_create_command(foreign_key): ... diff --git a/stubs/pony/pony/orm/decompiling.pyi b/stubs/pony/pony/orm/decompiling.pyi new file mode 100644 index 000000000000..83f56630b590 --- /dev/null +++ b/stubs/pony/pony/orm/decompiling.pyi @@ -0,0 +1,135 @@ +import ast +from _typeshed import Incomplete + +class DecompileError(NotImplementedError): ... 
+ +ast_cache: dict[int, tuple[Incomplete | None, set[Incomplete]]] + +def decompile(x): ... +def simplify(clause): ... + +class InvalidQuery(Exception): ... + +def binop(node_type): ... + +operator_mapping: dict[str, type[ast.cmpop]] + +def clean_assign(node): ... +def make_const(value): ... +def is_const(value) -> bool: ... +def unwrap_str(key) -> str: ... + +class Decompiler: + def __init__(decompiler, code, start: int = 0, end=None) -> None: ... + def get_instructions(decompiler) -> None: ... + def analyze_jumps(decompiler) -> None: ... + def decompile(decompiler) -> None: ... + def pop_items(decompiler, size): ... + def store(decompiler, node) -> None: ... + BINARY_POWER: Incomplete + BINARY_MULTIPLY: Incomplete + BINARY_DIVIDE: Incomplete + BINARY_FLOOR_DIVIDE: Incomplete + BINARY_ADD: Incomplete + BINARY_SUBTRACT: Incomplete + BINARY_LSHIFT: Incomplete + BINARY_RSHIFT: Incomplete + BINARY_AND: Incomplete + BINARY_XOR: Incomplete + BINARY_OR: Incomplete + BINARY_TRUE_DIVIDE = BINARY_DIVIDE + BINARY_MODULO: Incomplete + def BINARY_OP(decompiler, opcode): ... + def BINARY_SLICE(decompiler): ... + def BINARY_SUBSCR(decompiler): ... + def BUILD_CONST_KEY_MAP(decompiler, length): ... + def BUILD_LIST(decompiler, size): ... + def BUILD_MAP(decompiler, length): ... + def BUILD_SET(decompiler, size): ... + def BUILD_SLICE(decompiler, size): ... + def BUILD_TUPLE(decompiler, size): ... + def BUILD_STRING(decompiler, count): ... + def CALL_FUNCTION(decompiler, argc, star=None, star2=None): ... + def CACHE(decompiler) -> None: ... + def CALL(decompiler, argc): ... + def CALL_FUNCTION_VAR(decompiler, argc): ... + def CALL_FUNCTION_KW(decompiler, argc): ... + def CALL_FUNCTION_VAR_KW(decompiler, argc): ... + def CALL_FUNCTION_EX(decompiler, argc): ... + def CALL_METHOD(decompiler, argc): ... + def COMPARE_OP(decompiler, op): ... + def COPY(decompiler, _) -> None: ... + def COPY_FREE_VARS(decompiler, n) -> None: ... + def CONTAINS_OP(decompiler, invert): ... + def DUP_TOP(decompiler): ... + def FOR_ITER(decompiler, endpos): ... + def FORMAT_VALUE(decompiler, flags): ... + def GEN_START(decompiler, kind) -> None: ... + def GET_ITER(decompiler) -> None: ... + def JUMP_IF_FALSE(decompiler, endpos): ... + JUMP_IF_FALSE_OR_POP = JUMP_IF_FALSE + def JUMP_IF_NOT_EXC_MATCH(decompiler, endpos) -> None: ... + def JUMP_IF_TRUE(decompiler, endpos): ... + JUMP_IF_TRUE_OR_POP = JUMP_IF_TRUE + def conditional_jump(decompiler, endpos, if_true): ... + def conditional_jump_old(decompiler, endpos, if_true): ... + def conditional_jump_new(decompiler, endpos, if_true): ... + def conditional_jump_none_impl(decompiler, endpos, negate): ... + def jump_if_none(decompiler, endpos): ... + def jump_if_not_none(decompiler, endpos): ... + def process_target(decompiler, pos, partial: bool = False) -> None: ... + def JUMP_FORWARD(decompiler, endpos): ... + def KW_NAMES(decompiler, kw_names) -> None: ... + def IS_OP(decompiler, invert): ... + def LIST_APPEND(decompiler, offset) -> None: ... + def LIST_EXTEND(decompiler, offset): ... + def LIST_TO_TUPLE(decompiler): ... + def LOAD_ATTR(decompiler, attr_name, push_null): ... + def LOAD_CLOSURE(decompiler, freevar): ... + def LOAD_CONST(decompiler, const_value): ... + def LOAD_DEREF(decompiler, freevar): ... + def LOAD_FAST(decompiler, varname): ... + LOAD_FAST_AND_CLEAR = LOAD_FAST + def LOAD_GLOBAL(decompiler, varname, push_null): ... + def LOAD_METHOD(decompiler, methname): ... + LOOKUP_METHOD = LOAD_METHOD + def LOAD_NAME(decompiler, varname): ... 
+ def MAKE_CELL(decompiler, freevar) -> None: ... + def MAKE_CLOSURE(decompiler, argc): ... + def MAKE_FUNCTION(decompiler, argc): ... + POP_JUMP_BACKWARD_IF_FALSE = JUMP_IF_FALSE + POP_JUMP_BACKWARD_IF_TRUE = JUMP_IF_TRUE + POP_JUMP_FORWARD_IF_FALSE = JUMP_IF_FALSE + POP_JUMP_FORWARD_IF_TRUE = JUMP_IF_TRUE + POP_JUMP_IF_FALSE = JUMP_IF_FALSE + POP_JUMP_IF_TRUE = JUMP_IF_TRUE + POP_JUMP_BACKWARD_IF_NONE = jump_if_none + POP_JUMP_BACKWARD_IF_NOT_NONE = jump_if_not_none + POP_JUMP_FORWARD_IF_NONE = jump_if_none + POP_JUMP_FORWARD_IF_NOT_NONE = jump_if_not_none + def POP_TOP(decompiler) -> None: ... + def PRECALL(decompiler, argc) -> None: ... + def PUSH_NULL(decompiler) -> None: ... + def RETURN_VALUE(decompiler): ... + def RETURN_CONST(decompiler, val): ... + def RETURN_GENERATOR(decompiler) -> None: ... + def RESUME(decompiler, where) -> None: ... + def ROT_TWO(decompiler) -> None: ... + def ROT_THREE(decompiler) -> None: ... + def SETUP_LOOP(decompiler, endpos) -> None: ... + def STORE_ATTR(decompiler, attrname) -> None: ... + def STORE_DEREF(decompiler, freevar) -> None: ... + def STORE_FAST(decompiler, varname) -> None: ... + def STORE_MAP(decompiler) -> None: ... + def STORE_SUBSCR(decompiler) -> None: ... + def SWAP(decompiler, _) -> None: ... + def UNARY_POSITIVE(decompiler): ... + def UNARY_NEGATIVE(decompiler): ... + def UNARY_NOT(decompiler): ... + def UNARY_INVERT(decompiler): ... + def UNPACK_SEQUENCE(decompiler, count): ... + def YIELD_VALUE(decompiler, _=None): ... + +test_lines: str + +def test(test_line=None) -> None: ... diff --git a/stubs/pony/pony/orm/examples/__init__.pyi b/stubs/pony/pony/orm/examples/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/pony/pony/orm/examples/alessandro_bug.pyi b/stubs/pony/pony/orm/examples/alessandro_bug.pyi new file mode 100644 index 000000000000..07b92133e84b --- /dev/null +++ b/stubs/pony/pony/orm/examples/alessandro_bug.pyi @@ -0,0 +1,48 @@ +from _typeshed import Incomplete + +from pony.orm import * +from pony.orm.core import Database, Entity + +database: Database + +class User(Entity): + user_id: PrimaryKey + owned_pokemons: Incomplete + is_admin: Incomplete + is_banned: Incomplete + @staticmethod + def get_or_create(user_id: int) -> User: ... + @staticmethod + def get_by_id(user_id: int) -> User: ... + def catch_pokemon(self, pokemon: Pokemon): ... + def remove_pokemon(self, pokemon: Pokemon): ... + favorite_color: Incomplete + def set_favorite_color(self, color: tuple[Incomplete, ...]): ... + +class Pokemon(Entity): + name: Incomplete + pokemon_id: Incomplete + sprite: Incomplete + is_shiny: Incomplete + owner: Incomplete + spawned_chat_id: Incomplete + spawned_message_id: Incomplete + @property + def captured(self) -> bool: ... + def caught_by(self, user: User): ... + +class Chat(Entity): + chat_id: Incomplete + active: Incomplete + def activate(self) -> None: ... + def deactivate(self) -> None: ... + @staticmethod + def get_or_create(chat_id: int) -> Chat: ... + @staticmethod + def get_by_id(chat_id: int) -> Chat: ... + +def spawn_pokemon( + chat_id: int, message_id: int, pokemon_json: dict[Incomplete, Incomplete], is_shiny: bool = False +) -> Pokemon: ... +def get_spawned_pokemon(chat_id: int, message_id: int) -> Pokemon | None: ... +def setup() -> None: ... 
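Most attributes in these example stubs are typed as Incomplete because Pony declares them at runtime with attribute descriptors rather than plain annotations. A hedged sketch of what such declarations typically look like; the names mirror the stubbed example above, but the bodies are illustrative, not the upstream source:

from pony.orm import Database, Optional, PrimaryKey, Required, Set

db = Database()

class User(db.Entity):
    user_id = PrimaryKey(int)
    is_admin = Required(bool, default=False)   # assumed default, for illustration
    owned_pokemons = Set("Pokemon")            # reverse side of Pokemon.owner

class Pokemon(db.Entity):
    name = Required(str)
    is_shiny = Required(bool, default=False)
    owner = Optional(User)                     # nullable link back to User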
diff --git a/stubs/pony/pony/orm/examples/bottle_example.pyi b/stubs/pony/pony/orm/examples/bottle_example.pyi new file mode 100644 index 000000000000..5ea35b2aacd0 --- /dev/null +++ b/stubs/pony/pony/orm/examples/bottle_example.pyi @@ -0,0 +1,6 @@ +from pony.orm.examples.estore import * + +def all_products() -> str: ... +def show_product(id: int) -> str: ... +def edit_product(id: int) -> str: ... +def save_product(id: int) -> None: ... diff --git a/stubs/pony/pony/orm/examples/bug_ben.pyi b/stubs/pony/pony/orm/examples/bug_ben.pyi new file mode 100644 index 000000000000..3b7ecb074f0e --- /dev/null +++ b/stubs/pony/pony/orm/examples/bug_ben.pyi @@ -0,0 +1,19 @@ +from _typeshed import Incomplete + +from pony.orm.core import Database, Entity + +db: Database + +class ReconciledPayments(Entity): + id: Incomplete + foo: Incomplete + add_on_id: Incomplete + +class ContractAddOns(Entity): + id: Incomplete + reconciled_payments: Incomplete + +r1: ReconciledPayments +r2: ReconciledPayments +c1: ContractAddOns +old_val: Incomplete diff --git a/stubs/pony/pony/orm/examples/compositekeys.pyi b/stubs/pony/pony/orm/examples/compositekeys.pyi new file mode 100644 index 000000000000..0c747318d05e --- /dev/null +++ b/stubs/pony/pony/orm/examples/compositekeys.pyi @@ -0,0 +1,78 @@ +from _typeshed import Incomplete + +from pony.orm.core import Database, Entity + +db: Database + +class Group(Entity): + dept: Incomplete + year: Incomplete + spec: Incomplete + students: Incomplete + courses: Incomplete + lessons: Incomplete + +class Department(Entity): + number: Incomplete + faculty: Incomplete + name: Incomplete + groups: Incomplete + teachers: Incomplete + +class Faculty(Entity): + number: Incomplete + name: Incomplete + depts: Incomplete + +class Student(Entity): + name: Incomplete + group: Incomplete + dob: Incomplete + grades: Incomplete + +class Grade(Entity): + student: Incomplete + task: Incomplete + date: Incomplete + value: Incomplete + +class Task(Entity): + course: Incomplete + type: Incomplete + number: Incomplete + descr: Incomplete + grades: Incomplete + +class Course(Entity): + subject: Incomplete + semester: Incomplete + groups: Incomplete + tasks: Incomplete + lessons: Incomplete + teachers: Incomplete + +class Subject(Entity): + name: Incomplete + descr: Incomplete + courses: Incomplete + +class Room(Entity): + building: Incomplete + number: Incomplete + floor: Incomplete + schedules: Incomplete + +class Teacher(Entity): + dept: Incomplete + name: Incomplete + courses: Incomplete + lessons: Incomplete + +class Lesson(Entity): + groups: Incomplete + course: Incomplete + room: Incomplete + teacher: Incomplete + date: Incomplete + +def test_queries() -> None: ... diff --git a/stubs/pony/pony/orm/examples/demo.pyi b/stubs/pony/pony/orm/examples/demo.pyi new file mode 100644 index 000000000000..1d6f7ad0fb48 --- /dev/null +++ b/stubs/pony/pony/orm/examples/demo.pyi @@ -0,0 +1,30 @@ +from _typeshed import Incomplete + +from pony.orm.core import Database, Entity + +db: Database + +class Customer(Entity): + id: Incomplete + name: Incomplete + email: Incomplete + orders: Incomplete + +class Order(Entity): + id: Incomplete + total_price: Incomplete + customer: Incomplete + items: Incomplete + +class Product(Entity): + id: Incomplete + name: Incomplete + price: Incomplete + items: Incomplete + +class OrderItem(Entity): + quantity: Incomplete + order: Incomplete + product: Incomplete + +def populate_database() -> None: ... 
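The demo stubs above declare a small Customer/Order/Product schema. A self-contained query sketch in the same spirit, showing aggregation over a collection attribute with the public select/db_session API; the entities and values below are simplified stand-ins, not the upstream example:

from decimal import Decimal
from pony.orm import Database, Required, Set, db_session, select

db = Database()

class Customer(db.Entity):
    name = Required(str)
    orders = Set("Order")

class Order(db.Entity):
    total_price = Required(Decimal)
    customer = Required(Customer)

db.bind(provider="sqlite", filename=":memory:")
db.generate_mapping(create_tables=True)

with db_session:
    alice = Customer(name="Alice")
    Order(total_price=Decimal("120"), customer=alice)
    # Aggregation inside a query: customers whose orders total more than 100.
    print(select(c.name for c in Customer if sum(c.orders.total_price) > 100)[:])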
diff --git a/stubs/pony/pony/orm/examples/estore.pyi b/stubs/pony/pony/orm/examples/estore.pyi new file mode 100644 index 000000000000..79304586c90e --- /dev/null +++ b/stubs/pony/pony/orm/examples/estore.pyi @@ -0,0 +1,58 @@ +from _typeshed import Incomplete + +from pony.orm.core import Database, Entity + +db: Database + +class Customer(Entity): + email: Incomplete + password: Incomplete + name: Incomplete + country: Incomplete + address: Incomplete + cart_items: Incomplete + orders: Incomplete + +class Product(Entity): + id: Incomplete + name: Incomplete + categories: Incomplete + description: Incomplete + picture: Incomplete + price: Incomplete + quantity: Incomplete + cart_items: Incomplete + order_items: Incomplete + +class CartItem(Entity): + quantity: Incomplete + customer: Incomplete + product: Incomplete + +class OrderItem(Entity): + quantity: Incomplete + price: Incomplete + order: Incomplete + product: Incomplete + +class Order(Entity): + id: Incomplete + state: Incomplete + date_created: Incomplete + date_shipped: Incomplete + date_delivered: Incomplete + total_price: Incomplete + customer: Incomplete + items: Incomplete + +class Category(Entity): + name: Incomplete + products: Incomplete + +CREATED: str +SHIPPED: str +DELIVERED: str +CANCELLED: str + +def populate_database() -> None: ... +def test_queries() -> None: ... diff --git a/stubs/pony/pony/orm/examples/inheritance1.pyi b/stubs/pony/pony/orm/examples/inheritance1.pyi new file mode 100644 index 000000000000..264e39221360 --- /dev/null +++ b/stubs/pony/pony/orm/examples/inheritance1.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete + +from pony.orm.core import Database, Entity + +db: Database + +class Person(Entity): + id: Incomplete + name: Incomplete + dob: Incomplete + ssn: Incomplete + +class Student(Person): + group: Incomplete + mentor: Incomplete + attend_courses: Incomplete + +class Teacher(Person): + teach_courses: Incomplete + apprentices: Incomplete + salary: Incomplete + +class Assistant(Student, Teacher): ... + +class Professor(Teacher): + position: Incomplete + +class Group(Entity): + number: Incomplete + students: Incomplete + +class Course(Entity): + name: Incomplete + semester: Incomplete + students: Incomplete + teachers: Incomplete + +def populate_database() -> None: ... +def show_all_persons() -> None: ... diff --git a/stubs/pony/pony/orm/examples/numbers.pyi b/stubs/pony/pony/orm/examples/numbers.pyi new file mode 100644 index 000000000000..c28b662ead7b --- /dev/null +++ b/stubs/pony/pony/orm/examples/numbers.pyi @@ -0,0 +1,20 @@ +from _typeshed import Incomplete + +from pony.orm.core import Database, Entity + +db: Database + +class Numbers(Entity): + id: Incomplete + int8: Incomplete + int16: Incomplete + int24: Incomplete + int32: Incomplete + int64: Incomplete + uint8: Incomplete + uint16: Incomplete + uint24: Incomplete + uint32: Incomplete + +def populate_database() -> None: ... +def test_data() -> None: ... 
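inheritance1 above stubs an entity hierarchy in which Student and Teacher derive from Person (and Assistant from both). A hedged sketch of entity inheritance in Pony and a polymorphic query over the base entity; the attributes are simplified stand-ins, not the upstream example:

from pony.orm import Database, Optional, Required, db_session, select

db = Database()

class Person(db.Entity):
    name = Required(str)

class Student(Person):
    gpa = Optional(float)

class Teacher(Person):
    salary = Optional(int)

db.bind(provider="sqlite", filename=":memory:")
db.generate_mapping(create_tables=True)

with db_session:
    Student(name="Alice", gpa=3.9)
    Teacher(name="Bob", salary=100)
    # A query over the base entity also yields subclass instances.
    for person in select(p for p in Person):
        print(type(person).__name__, person.name)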
diff --git a/stubs/pony/pony/orm/examples/session01.pyi b/stubs/pony/pony/orm/examples/session01.pyi new file mode 100644 index 000000000000..418f87b14dd5 --- /dev/null +++ b/stubs/pony/pony/orm/examples/session01.pyi @@ -0,0 +1,15 @@ +from _typeshed import Incomplete + +from pony.orm.core import Database, Entity + +db: Database + +class Person(Entity): + name: str + age: int + +p1: Person +p2: Person +x: int +y: int +q: Incomplete diff --git a/stubs/pony/pony/orm/examples/university1.pyi b/stubs/pony/pony/orm/examples/university1.pyi new file mode 100644 index 000000000000..2619428cebc1 --- /dev/null +++ b/stubs/pony/pony/orm/examples/university1.pyi @@ -0,0 +1,43 @@ +from _typeshed import Incomplete +from decimal import Decimal as Decimal + +from pony.orm.core import Database, Entity + +db: Database + +class Department(Entity): + number: Incomplete + name: Incomplete + groups: Incomplete + courses: Incomplete + +class Group(Entity): + number: Incomplete + major: Incomplete + dept: Incomplete + students: Incomplete + +class Course(Entity): + name: Incomplete + semester: Incomplete + lect_hours: Incomplete + lab_hours: Incomplete + credits: Incomplete + dept: Incomplete + students: Incomplete + +class Student(Entity): + id: Incomplete + name: Incomplete + dob: Incomplete + tel: Incomplete + picture: Incomplete + gpa: Incomplete + group: Incomplete + courses: Incomplete + +params: dict[str, dict[str, str | bool] | dict[str, str | int]] + +def populate_database() -> None: ... +def print_students(students) -> None: ... +def test_queries() -> None: ... diff --git a/stubs/pony/pony/orm/examples/university2.pyi b/stubs/pony/pony/orm/examples/university2.pyi new file mode 100644 index 000000000000..40d13737e4ab --- /dev/null +++ b/stubs/pony/pony/orm/examples/university2.pyi @@ -0,0 +1,87 @@ +from _typeshed import Incomplete + +from pony.orm.core import Database, Entity + +db: Database + +class Faculty(Entity): + number: Incomplete + name: Incomplete + departments: Incomplete + +class Department(Entity): + number: Incomplete + name: Incomplete + faculty: Incomplete + teachers: Incomplete + majors: Incomplete + groups: Incomplete + +class Group(Entity): + number: Incomplete + grad_year: Incomplete + department: Incomplete + lessons: Incomplete + students: Incomplete + +class Student(Entity): + name: Incomplete + scholarship: Incomplete + group: Incomplete + grades: Incomplete + +class Major(Entity): + name: Incomplete + department: Incomplete + courses: Incomplete + +class Subject(Entity): + name: Incomplete + courses: Incomplete + teachers: Incomplete + +class Course(Entity): + major: Incomplete + subject: Incomplete + semester: Incomplete + lect_hours: Incomplete + pract_hours: Incomplete + credit: Incomplete + lessons: Incomplete + grades: Incomplete + +class Lesson(Entity): + day_of_week: Incomplete + meeting_time: Incomplete + classroom: Incomplete + course: Incomplete + teacher: Incomplete + groups: Incomplete + +class Grade(Entity): + student: Incomplete + course: Incomplete + teacher: Incomplete + date: Incomplete + value: Incomplete + +class Teacher(Entity): + name: Incomplete + degree: Incomplete + department: Incomplete + subjects: Incomplete + lessons: Incomplete + grades: Incomplete + +class Building(Entity): + number: Incomplete + description: Incomplete + classrooms: Incomplete + +class Classroom(Entity): + building: Incomplete + number: Incomplete + description: Incomplete + lessons: Incomplete + +def test_queries() -> None: ... 
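university1 above exposes a params dict of per-provider settings. A hedged sketch of unpacking one such configuration into Database.bind(); the keys and file name here are illustrative assumptions, not the upstream example's values:

from pony.orm import Database, Required, db_session

params = {
    "sqlite": {"provider": "sqlite", "filename": "university.sqlite", "create_db": True},
}

db = Database()

class Student(db.Entity):  # trimmed-down stand-in for the example's Student entity
    name = Required(str)

db.bind(**params["sqlite"])  # unpack the chosen provider's settings
db.generate_mapping(create_tables=True)

with db_session:
    Student(name="Carol")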
diff --git a/stubs/pony/pony/orm/integration/__init__.pyi b/stubs/pony/pony/orm/integration/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/pony/pony/orm/integration/bottle_plugin.pyi b/stubs/pony/pony/orm/integration/bottle_plugin.pyi new file mode 100644 index 000000000000..d8bf1727386c --- /dev/null +++ b/stubs/pony/pony/orm/integration/bottle_plugin.pyi @@ -0,0 +1,6 @@ +def is_allowed_exception(e: BaseException | None) -> bool: ... + +class PonyPlugin: + name: str + api: int + def apply(self, callback, route): ... diff --git a/stubs/pony/pony/orm/ormtypes.pyi b/stubs/pony/pony/orm/ormtypes.pyi new file mode 100644 index 000000000000..dcac43038e40 --- /dev/null +++ b/stubs/pony/pony/orm/ormtypes.pyi @@ -0,0 +1,153 @@ +from _typeshed import Incomplete +from collections.abc import Mapping +from typing import Any +from typing_extensions import Self + +NoneType: type[None] + +class LongStr(str): + lazy: bool + +LongUnicode = LongStr + +class SetType: + def __deepcopy__(self, memo) -> Self: ... + item_type: Incomplete + def __init__(self, item_type) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... + +class FuncType: + def __deepcopy__(self, memo) -> Self: ... + func: Incomplete + def __init__(self, func) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... + +class MethodType: + def __deepcopy__(self, memo) -> Self: ... + obj: Incomplete + func: Incomplete + def __init__(self, method) -> None: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + def __hash__(self) -> int: ... + +raw_sql_cache: dict[str, Incomplete] + +def parse_raw_sql(sql: str): ... +def raw_sql(sql: str, result_type=None) -> RawSQL: ... + +class RawSQL: + def __deepcopy__(self, memo) -> None: ... + sql: str + result_type: Incomplete + def __init__( + self, sql: str, globals: dict[str, Any] | None = None, locals: Mapping[str, object] | None = None, result_type=None + ) -> None: ... + +class RawSQLType: + def __deepcopy__(self, memo) -> Self: ... + sql: str + items: Incomplete + types: Incomplete + result_type: Incomplete + def __init__(self, sql: str, items, types, result_type) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other) -> bool: ... + def __ne__(self, other) -> bool: ... + +class QueryType: + query_key: Incomplete + translator: Incomplete + limit: Incomplete + offset: Incomplete + def __init__(self, query, limit=None, offset=None) -> None: ... + def __hash__(self) -> int: ... + def __eq__(self, other): ... + def __ne__(self, other): ... + +def normalize(value): ... +def normalize_type(t): ... + +coercions: Incomplete + +def coerce_types(t1, t2): ... +def are_comparable_types(t1, t2, op: str = "=="): ... + +class TrackedValue: + obj_ref: Incomplete + attr: Incomplete + def __init__(self, obj, attr) -> None: ... + @classmethod + def make(cls, obj, attr, value): ... + def get_untracked(self) -> None: ... + +def tracked_method(func): ... + +class TrackedDict(TrackedValue, dict[Incomplete, Incomplete]): + def __init__(self, obj, attr, value) -> None: ... + def __reduce__(self): ... + __setitem__: Incomplete + __delitem__: Incomplete + def update(self, *args, **kwargs): ... + setdefault: Incomplete + pop: Incomplete + popitem: Incomplete + clear: Incomplete + def get_untracked(self): ... + +class TrackedList(TrackedValue, list[Incomplete]): + def __init__(self, obj, attr, value) -> None: ... + def __reduce__(self): ... 
+ __setitem__: Incomplete + __delitem__: Incomplete + extend: Incomplete + append: Incomplete + pop: Incomplete + remove: Incomplete + insert: Incomplete + reverse: Incomplete + sort: Incomplete + clear: Incomplete + def get_untracked(self): ... + +def validate_item(item_type, item): ... + +class TrackedArray(TrackedList): + item_type: Incomplete + def __init__(self, obj, attr, value) -> None: ... + def extend(self, items) -> None: ... + def append(self, item) -> None: ... + def insert(self, index, item) -> None: ... + def __setitem__(self, index, item) -> None: ... + def __contains__(self, item) -> bool: ... + +class Json: + @classmethod + def default_empty_value(cls): ... + wrapped: Incomplete + def __init__(self, wrapped) -> None: ... + +class Array: + item_type: type | None + @classmethod + def default_empty_value(cls): ... + +class IntArray(Array): + item_type: type[int] + +class StrArray(Array): + item_type: type[str] + +class FloatArray(Array): + item_type: type[float] + +numeric_types: set[type] +comparable_types: set[type] +primitive_types: set[type] +function_types: set[type] +type_normalization_dict: dict[type, type] +array_types: dict[type, type[Array]] diff --git a/stubs/pony/pony/orm/serialization.pyi b/stubs/pony/pony/orm/serialization.pyi new file mode 100644 index 000000000000..b8acc580ce37 --- /dev/null +++ b/stubs/pony/pony/orm/serialization.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete +from collections import defaultdict +from collections.abc import Iterable +from datetime import date, datetime +from decimal import Decimal + +from pony.orm.core import Database, Entity + +class Bag: + database: Database + session_cache: Incomplete + entity_configs: dict[Entity, tuple[Incomplete, bool]] + objects: defaultdict[type[Entity], set[Entity]] + vars: dict[Incomplete, Incomplete] + dicts: defaultdict[Incomplete, dict[Incomplete, Incomplete]] + def __init__(bag, database: Database) -> None: ... + def config( + bag, + entity: Entity, + only=None, + exclude=None, + with_collections: bool = True, + with_lazy: bool = False, + related_objects: bool = True, + ) -> tuple[Incomplete, bool]: ... + def put(bag, x: Entity | Iterable[Entity]) -> None: ... + def to_dict(bag): ... + def to_json(bag) -> str: ... + +def to_dict(objects): ... +def to_json(objects) -> str: ... +def json_converter(x: datetime | date | Decimal) -> str: ... diff --git a/stubs/pony/pony/orm/sqlbuilding.pyi b/stubs/pony/pony/orm/sqlbuilding.pyi new file mode 100644 index 000000000000..f3aeca3077f6 --- /dev/null +++ b/stubs/pony/pony/orm/sqlbuilding.pyi @@ -0,0 +1,162 @@ +import sys +import types +from _typeshed import Incomplete +from collections.abc import Iterable +from typing import ClassVar + +class AstError(Exception): ... + +class Param: + style: Incomplete + id: Incomplete + paramkey: Incomplete + converter: Incomplete + optimistic: bool + def __init__(param, paramstyle, paramkey, converter=None, optimistic: bool = False) -> None: ... + def eval(param, values): ... + +class CompositeParam(Param): + items: Iterable[Param | Value] + func: Incomplete + def __init__(param, paramstyle, paramkey, items: Iterable[Param | Value], func) -> None: ... + def eval(param, values): ... + +class Value: + paramstyle: Incomplete + value: Incomplete + def __init__(self, paramstyle, value) -> None: ... + def quote_str(self, s: str) -> str: ... + +def flat(tree): ... +def flat_conditions(conditions): ... +def join(delimiter, items): ... +def move_conditions_from_inner_join_to_where(sections): ... 
+def make_binary_op(symbol, default_parentheses: bool = False): ... +def make_unary_func(symbol): ... +def indentable(method): ... + +class SQLBuilder: + dialect: ClassVar[str | None] + param_class: ClassVar[type[Param]] + composite_param_class: ClassVar[type[CompositeParam]] + value_class: ClassVar[type[Value]] + indent_spaces: ClassVar[str] + least_func_name: ClassVar[str] + greatest_func_name: ClassVar[str] + def __init__(builder, provider, ast) -> None: ... + def __call__(builder, ast): ... + def INSERT(builder, table_name, columns, values, returning=None): ... + def DEFAULT(builder): ... + def UPDATE(builder, table_name, pairs, where=None): ... + def DELETE(builder, alias, from_ast, where=None): ... + def SELECT(builder, *sections): ... + def SELECT_FOR_UPDATE(builder, nowait, skip_locked, *sections): ... + def EXISTS(builder, *sections): ... + def NOT_EXISTS(builder, *sections): ... + def ALL(builder, *expr_list): ... + def DISTINCT(builder, *expr_list): ... + def AGGREGATES(builder, *expr_list): ... + def AS(builder, expr, alias): ... + def compound_name(builder, name_parts): ... + def sql_join(builder, join_type, sources): ... + def FROM(builder, *sources): ... + def INNER_JOIN(builder, *sources): ... + def LEFT_JOIN(builder, *sources): ... + def WHERE(builder, *conditions): ... + def HAVING(builder, *conditions): ... + def GROUP_BY(builder, *expr_list): ... + def UNION(builder, kind, *sections): ... + def INTERSECT(builder, *sections): ... + def EXCEPT(builder, *sections): ... + def ORDER_BY(builder, *order_list): ... + def DESC(builder, expr): ... + def LIMIT(builder, limit, offset=None): ... + def COLUMN(builder, table_alias, col_name): ... + def PARAM(builder, paramkey, converter=None, optimistic: bool = False): ... + def make_param(builder, param_class, paramkey, *args): ... + def make_composite_param(builder, paramkey, items, func): ... + def STAR(builder, table_alias): ... + def ROW(builder, *items): ... + def VALUE(builder, value): ... + def AND(builder, *cond_list): ... + def OR(builder, *cond_list): ... + def NOT(builder, condition): ... + def POW(builder, expr1, expr2): ... + EQ: Incomplete + NE: Incomplete + LT: Incomplete + LE: Incomplete + GT: Incomplete + GE: Incomplete + ADD: Incomplete + SUB: Incomplete + MUL: Incomplete + DIV: Incomplete + FLOORDIV: Incomplete + def MOD(builder, a, b): ... + def FLOAT_EQ(builder, a, b): ... + def FLOAT_NE(builder, a, b): ... + def CONCAT(builder, *args): ... + def NEG(builder, expr): ... + def IS_NULL(builder, expr): ... + def IS_NOT_NULL(builder, expr): ... + def LIKE(builder, expr, template, escape=None): ... + def NOT_LIKE(builder, expr, template, escape=None): ... + def BETWEEN(builder, expr1, expr2, expr3): ... + def NOT_BETWEEN(builder, expr1, expr2, expr3): ... + def IN(builder, expr1, x): ... + def NOT_IN(builder, expr1, x): ... + def COUNT(builder, distinct, *expr_list): ... + def SUM(builder, distinct, expr): ... + def AVG(builder, distinct, expr): ... + def GROUP_CONCAT(builder, distinct, expr, sep=None): ... + UPPER: Incomplete + LOWER: Incomplete + LENGTH: Incomplete + ABS: Incomplete + def COALESCE(builder, *args): ... + def MIN(builder, distinct, *args): ... + def MAX(builder, distinct, *args): ... + def SUBSTR(builder, expr, start, len=None): ... + def STRING_SLICE(builder, expr, start, stop): ... + def CASE(builder, expr, cases, default=None): ... + def IF(builder, cond, then, else_): ... + def TRIM(builder, expr, chars=None): ... + def LTRIM(builder, expr, chars=None): ... 
+ def RTRIM(builder, expr, chars=None): ... + def REPLACE(builder, str, from_, to): ... + def TO_INT(builder, expr): ... + def TO_STR(builder, expr): ... + def TO_REAL(builder, expr): ... + def TODAY(builder): ... + def NOW(builder): ... + def DATE(builder, expr): ... + def YEAR(builder, expr): ... + def MONTH(builder, expr): ... + def DAY(builder, expr): ... + def HOUR(builder, expr): ... + def MINUTE(builder, expr): ... + def SECOND(builder, expr): ... + def RANDOM(builder): ... + def RAWSQL(builder, sql): ... + def build_json_path(builder, path): ... + if sys.version_info >= (3, 10): + @classmethod + def eval_json_path(cls, values: Iterable[int | str | types.EllipsisType | slice]) -> str: ... + else: + @classmethod + def eval_json_path(cls, values: Iterable[int | str | type | slice]) -> str: ... + + def JSON_QUERY(builder, expr, path) -> None: ... + def JSON_VALUE(builder, expr, path, type) -> None: ... + def JSON_NONZERO(builder, expr) -> None: ... + def JSON_CONCAT(builder, left, right) -> None: ... + def JSON_CONTAINS(builder, expr, path, key) -> None: ... + def JSON_ARRAY_LENGTH(builder, value) -> None: ... + def JSON_PARAM(builder, expr): ... + def ARRAY_INDEX(builder, col, index) -> None: ... + def ARRAY_CONTAINS(builder, key, not_in, col) -> None: ... + def ARRAY_SUBSET(builder, array1, not_in, array2) -> None: ... + def ARRAY_LENGTH(builder, array) -> None: ... + def ARRAY_SLICE(builder, array, start, stop) -> None: ... + def MAKE_ARRAY(builder, *items) -> None: ... diff --git a/stubs/pony/pony/orm/sqlsymbols.pyi b/stubs/pony/pony/orm/sqlsymbols.pyi new file mode 100644 index 000000000000..237991537b91 --- /dev/null +++ b/stubs/pony/pony/orm/sqlsymbols.pyi @@ -0,0 +1,88 @@ +from typing import Final + +symbols: Final[list[str]] +SELECT: Final = "SELECT" +INSERT: Final = "INSERT" +UPDATE: Final = "UPDATE" +DELETE: Final = "DELETE" +SELECT_FOR_UPDATE: Final = "SELECT_FOR_UPDATE" +FROM: Final = "FROM" +INNER_JOIN: Final = "INNER_JOIN" +LEFT_JOIN: Final = "LEFT_JOIN" +WHERE: Final = "WHERE" +GROUP_BY: Final = "GROUP_BY" +HAVING: Final = "HAVING" +UNION: Final = "UNION" +INTERSECT: Final = "INTERSECT" +EXCEPT: Final = "EXCEPT" +ORDER_BY: Final = "ORDER_BY" +LIMIT: Final = "LIMIT" +ASC: Final = "ASC" +DESC: Final = "DESC" +DISTINCT: Final = "DISTINCT" +ALL: Final = "ALL" +AGGREGATES: Final = "AGGREGATES" +AS: Final = "AS" +COUNT: Final = "COUNT" +SUM: Final = "SUM" +MIN: Final = "MIN" +MAX: Final = "MAX" +AVG: Final = "AVG" +TABLE: Final = "TABLE" +COLUMN: Final = "COLUMN" +PARAM: Final = "PARAM" +VALUE: Final = "VALUE" +AND: Final = "AND" +OR: Final = "OR" +NOT: Final = "NOT" +EQ: Final = "EQ" +NE: Final = "NE" +LT: Final = "LT" +LE: Final = "LE" +GT: Final = "GT" +GE: Final = "GE" +IS_NULL: Final = "IS_NULL" +IS_NOT_NULL: Final = "IS_NOT_NULL" +LIKE: Final = "LIKE" +NOT_LIKE: Final = "NOT_LIKE" +BETWEEN: Final = "BETWEEN" +NOT_BETWEEN: Final = "NOT_BETWEEN" +IN: Final = "IN" +NOT_IN: Final = "NOT_IN" +EXISTS: Final = "EXISTS" +NOT_EXISTS: Final = "NOT_EXISTS" +ROW: Final = "ROW" +ADD: Final = "ADD" +SUB: Final = "SUB" +MUL: Final = "MUL" +DIV: Final = "DIV" +POW: Final = "POW" +NEG: Final = "NEG" +ABS: Final = "ABS" +UPPER: Final = "UPPER" +LOWER: Final = "LOWER" +CONCAT: Final = "CONCAT" +STRIN: Final = "STRIN" +SUBSTR: Final = "SUBSTR" +LENGTH: Final = "LENGTH" +TRIM: Final = "TRIM" +LTRIM: Final = "LTRIM" +RTRIM: Final = "RTRIM" +REPLACE: Final = "REPLACE" +CASE: Final = "CASE" +COALESCE: Final = "COALESCE" +TO_INT: Final = "TO_INT" +RANDOM: Final = "RANDOM" +DATE: Final = "DATE" 
+YEAR: Final = "YEAR" +MONTH: Final = "MONTH" +DAY: Final = "DAY" +HOUR: Final = "HOUR" +MINUTE: Final = "MINUTE" +SECOND: Final = "SECOND" +TODAY: Final = "TODAY" +NOW: Final = "NOW" +DATE_ADD: Final = "DATE_ADD" +DATE_SUB: Final = "DATE_SUB" +DATETIME_ADD: Final = "DATETIME_ADD" +DATETIME_SUB: Final = "DATETIME_SUB" diff --git a/stubs/pony/pony/orm/sqltranslation.pyi b/stubs/pony/pony/orm/sqltranslation.pyi new file mode 100644 index 000000000000..816405de10c2 --- /dev/null +++ b/stubs/pony/pony/orm/sqltranslation.pyi @@ -0,0 +1,773 @@ +import ast +import itertools +import re +import sys +import types +from _typeshed import Incomplete +from collections.abc import Generator, Iterable, Sequence +from datetime import date, datetime, time, timedelta +from decimal import Decimal +from random import random +from typing import Any, ClassVar, NoReturn +from typing_extensions import Self + +from pony.orm import core +from pony.orm.asttranslation import ASTTranslator +from pony.orm.ormtypes import raw_sql +from pony.utils import between, coalesce, concat, localbase + +NoneType: type[None] + +def check_comparable(left_monad: Monad, right_monad: Monad, op: str = "==") -> None: ... + +class IncomparableTypesError(TypeError): + type1: Incomplete + type2: Incomplete + def __init__(exc, type1, type2) -> None: ... + +def sqland(items): ... +def sqlor(items): ... +def join_tables(alias1, alias2, columns1, columns2): ... +def type2str(t) -> str: ... + +class Local(localbase): + translators: list[SQLTranslator] + def __init__(local) -> None: ... + @property + def translator(self) -> SQLTranslator: ... + +translator_counter: itertools.count[int] +local: Local + +class SQLTranslator(ASTTranslator): + dialect: ClassVar[str | None] + row_value_syntax: ClassVar[bool] + json_path_wildcard_syntax: ClassVar[bool] + json_values_are_comparable: ClassVar[bool] + rowid_support: ClassVar[bool] + registered_functions: dict[types.FunctionType, type[FuncMonad]] + def __enter__(translator) -> None: ... + def __exit__(translator, exc_type, exc_val, exc_tb) -> None: ... + def default_post(translator, node) -> None: ... + def dispatch(translator, node): ... + def dispatch_external(translator, node) -> None: ... + def call(translator, method, node): ... + def deepcopy(translator): ... + def __init__( + translator, + tree, + parent_translator, + code_key=None, + filter_num=None, + extractors=None, + vars=None, + vartypes=None, + left_join: bool = False, + optimize=None, + ) -> None: ... + def init( + translator, + tree, + parent_translator, + code_key=None, + filter_num=None, + extractors=None, + vars=None, + vartypes=None, + left_join: bool = False, + optimize=None, + ): ... + @property + def namespace(translator): ... + def can_be_optimized(translator): ... + def process_query_qual( + translator, prev_translator, prev_limit, prev_offset, names, try_extend_prev_query: bool = False + ) -> None: ... + def construct_subquery_ast( + translator, limit=None, offset=None, aliases=None, star=None, distinct=None, is_not_null_checks: bool = False + ): ... + def construct_sql_ast( + translator, + limit=None, + offset=None, + distinct=None, + aggr_func_name=None, + aggr_func_distinct=None, + sep=None, + for_update: bool = False, + nowait: bool = False, + skip_locked: bool = False, + is_not_null_checks: bool = False, + ): ... + def construct_delete_sql_ast(translator): ... + def get_used_attrs(translator): ... + def without_order(translator) -> Self: ... + def order_by_numbers(translator, numbers: Iterable[int]) -> Self: ... 
+ def order_by_attributes(translator, attrs: Iterable[core.DescWrapper | core.Attribute]) -> Self: ... + def apply_kwfilters(translator, filterattrs, original_names: bool = False) -> Self: ... + def apply_lambda( + translator, func_id, filter_num, order_by, func_ast, argnames, original_names, extractors, vars, vartypes + ) -> Self: ... + def preGeneratorExp(translator, node: ast.GeneratorExp) -> QuerySetMonad: ... + def postExpr(translator, node: ast.Expr): ... + def preCompare(translator, node: ast.Compare): ... + def postConstant(translator, node: ast.Constant) -> ConstMonad: ... + if sys.version_info >= (3, 14): + def postNameConstant(translator, node: ast.Constant): ... + def postNum(translator, node: ast.Constant) -> ConstMonad: ... + def postStr(translator, node: ast.Constant) -> ConstMonad: ... + def postBytes(translator, node: ast.Constant) -> ConstMonad: ... + else: + def postNameConstant(translator, node: ast.NameConstant): ... + def postNum(translator, node: ast.Num) -> ConstMonad: ... + def postStr(translator, node: ast.Str) -> ConstMonad: ... + def postBytes(translator, node: ast.Bytes) -> ConstMonad: ... + + def postList(translator, node: ast.List) -> ListMonad: ... + def postTuple(translator, node: ast.Tuple) -> ListMonad: ... + def postName(translator, node: ast.Name) -> Monad: ... + def resolve_name(translator, name) -> Monad: ... + def postAdd(translator, node: ast.Add): ... + def postSub(translator, node: ast.Sub): ... + def postMult(translator, node: ast.Mult): ... + def postMatMult(translator, node: ast.MatMult) -> NoReturn: ... + def postDiv(translator, node: ast.Div): ... + def postFloorDiv(translator, node: ast.FloorDiv): ... + def postMod(translator, node: ast.Mod): ... + def postLShift(translator, node: ast.LShift) -> NoReturn: ... + def postRShift(translator, node: ast.RShift) -> NoReturn: ... + def postPow(translator, node: ast.Pow): ... + def postUSub(translator, node: ast.USub): ... + def postAttribute(translator, node: ast.Attribute): ... + def postAnd(translator, node: ast.And) -> AndMonad: ... + def postOr(translator, node: ast.Or) -> OrMonad: ... + def postBitOr(translator, node: ast.BitOr): ... + def postBitAnd(translator, node: ast.BitAnd): ... + def postBitXor(translator, node: ast.BitXor): ... + def postNot(translator, node: ast.Not): ... + def preCall(translator, node: ast.Call): ... + def postCall(translator, node: ast.Call): ... + def postkeyword(translator, node: ast.keyword) -> None: ... + def postSubscript(translator, node: ast.Subscript): ... + def postSlice(translator, node: ast.Slice) -> None: ... + def postIndex(translator, node: ast.Index): ... + def postIfExp(translator, node: ast.IfExp) -> ExprMonad: ... + def postJoinedStr(translator, node: ast.JoinedStr) -> StringExprMonad: ... + def postFormattedValue(translator, node: ast.FormattedValue): ... + +def combine_limit_and_offset(limit, offset, limit2, offset2) -> tuple[Incomplete, Incomplete]: ... +def coerce_monads(m1, m2, for_comparison: bool = False): ... + +max_alias_length: int + +class SqlQuery: + translator: Incomplete + parent_sqlquery: SqlQuery | None + left_join: bool + from_ast: list[Incomplete] + conditions: list[Incomplete] + outer_conditions: list[Incomplete] + tablerefs: dict[Incomplete, Incomplete] + alias_counters: dict[Incomplete, Incomplete] + expr_counter: itertools.count[int] + used_from_subquery: bool + def __init__(sqlquery, translator, parent_sqlquery: SqlQuery | None = None, left_join: bool = False) -> None: ... + def get_tableref(sqlquery, name_path): ... 
+ def add_tableref(sqlquery, name_path, parent_tableref, attr) -> JoinedTableRef: ... + def make_alias(sqlquery, name: str) -> str: ... + def join_table(sqlquery, parent_alias, alias, table_name, join_cond) -> None: ... + +class TableRef: + sqlquery: SqlQuery + alias: str + name_path: str + entity: Incomplete + joined: bool + can_affect_distinct: bool + used_attrs: set[Incomplete] + def __init__(tableref, sqlquery: SqlQuery, name: str, entity) -> None: ... + def make_join(tableref, pk_only: bool = False) -> tuple[str, Incomplete]: ... + +class ExprTableRef(TableRef): + def __init__(tableref, sqlquery: SqlQuery, name: str, subquery_ast, expr_names, expr_aliases) -> None: ... + def make_join(tableref, pk_only: bool = False) -> tuple[str, Incomplete]: ... + +class StarTableRef(TableRef): + def __init__(tableref, sqlquery: SqlQuery, name: str, entity, subquery_ast) -> None: ... + def make_join(tableref, pk_only: bool = False) -> tuple[str, Incomplete]: ... + +class ExprJoinedTableRef: + def __init__(tableref, sqlquery: SqlQuery, parent_tableref, parent_columns, name, entity) -> None: ... + def make_join(tableref, pk_only: bool = False) -> tuple[str, Incomplete]: ... + +class JoinedTableRef: + sqlquery: SqlQuery + name_path: str + var_name: str | None + alias: str | None + optimized: bool | None + parent_tableref: Incomplete + attr: Incomplete + entity: Incomplete + joined: bool + can_affect_distinct: bool + used_attrs: set[Incomplete] + def __init__(tableref, sqlquery: SqlQuery, name_path: str, parent_tableref, attr) -> None: ... + def make_join(tableref, pk_only: bool = False) -> tuple[str, Incomplete]: ... + +def wrap_monad_method(cls_name: str, func: types.FunctionType): ... + +class MonadMeta(type): + def __new__(meta, cls_name: str, bases: tuple[type, ...], cls_dict: dict[str, Any]): ... + +class MonadMixin(metaclass=MonadMeta): ... + +class Monad(metaclass=MonadMeta): + disable_distinct: ClassVar[bool] + disable_ordering: ClassVar[bool] + node: Incomplete + translator: SQLTranslator + type: Incomplete + nullable: bool + def __init__(monad, type, nullable: bool = True) -> None: ... + def mixin_init(monad) -> None: ... + def to_single_cell_value(monad): ... + def cmp(monad, op, monad2): ... + def contains(monad, item, not_in: bool = False) -> None: ... + def nonzero(monad): ... + def negate(monad): ... + def getattr(monad, attrname): ... + def len(monad) -> None: ... + def count(monad, distinct=None): ... + def aggregate(monad, func_name, distinct=None, sep=None): ... + def __call__(monad, *args, **kwargs) -> None: ... + def __getitem__(monad, key) -> None: ... + def __add__(monad, monad2) -> None: ... + def __sub__(monad, monad2) -> None: ... + def __mul__(monad, monad2) -> None: ... + def __truediv__(monad, monad2) -> None: ... + def __floordiv__(monad, monad2) -> None: ... + def __pow__(monad, monad2) -> None: ... + def __neg__(monad) -> None: ... + def __or__(monad, monad2) -> None: ... + def __and__(monad, monad2) -> None: ... + def __xor__(monad, monad2) -> None: ... + def abs(monad) -> None: ... + def cast_from_json(monad, type) -> None: ... + def to_int(monad): ... + def to_str(monad): ... + def to_real(monad): ... + +def distinct_from_monad(distinct, default=None): ... + +class RawSQLMonad(Monad): + def __init__(monad, rawtype, varkey, nullable: bool = True) -> None: ... + def contains(monad, item, not_in: bool = False): ... + def nonzero(monad): ... + def getsql(monad, sqlquery=None): ... 
+ +typeerror_re_1: re.Pattern[str] +typeerror_re_2: re.Pattern[str] + +def reraise_improved_typeerror(exc: Exception, func_name: str | tuple[str, ...], orig_func_name: str) -> NoReturn: ... +def raise_forgot_parentheses(monad: Monad) -> NoReturn: ... + +class MethodMonad(Monad): + def __init__(monad, parent, attrname) -> None: ... + def getattr(monad, attrname) -> None: ... + def __call__(monad, *args, **kwargs): ... + def contains(monad, item, not_in: bool = False) -> None: ... + def nonzero(monad) -> None: ... + def negate(monad) -> None: ... + def aggregate(monad, func_name, distinct=None, sep=None) -> None: ... + def __getitem__(monad, key) -> None: ... + def __add__(monad, monad2) -> None: ... + def __sub__(monad, monad2) -> None: ... + def __mul__(monad, monad2) -> None: ... + def __truediv__(monad, monad2) -> None: ... + def __floordiv__(monad, monad2) -> None: ... + def __pow__(monad, monad2) -> None: ... + def __neg__(monad) -> None: ... + def abs(monad) -> None: ... + +class EntityMonad(Monad): + def __init__(monad, entity) -> None: ... + def __getitem__(monad, *args) -> None: ... + +class ListMonad(Monad): + def __init__(monad, items) -> None: ... + def contains(monad, x, not_in: bool = False): ... + def getsql(monad, sqlquery=None): ... + +class BufferMixin(MonadMixin): ... +class UuidMixin(MonadMixin): ... + +def make_numeric_binop(op, sqlop): ... + +class NumericMixin(MonadMixin): + def mixin_init(monad) -> None: ... + __add__: Incomplete + __sub__: Incomplete + __mul__: Incomplete + __truediv__: Incomplete + __floordiv__: Incomplete + __mod__: Incomplete + __and__: Incomplete + __or__: Incomplete + __xor__: Incomplete + def __pow__(monad, monad2): ... + def __neg__(monad): ... + def abs(monad): ... + def nonzero(monad): ... + def negate(monad): ... + +def numeric_attr_factory(name): ... +def make_datetime_binop(op, sqlop): ... + +class DateMixin(MonadMixin): + def mixin_init(monad) -> None: ... + attr_year: Incomplete + attr_month: Incomplete + attr_day: Incomplete + def __add__(monad, other): ... + def __sub__(monad, other): ... + +class TimeMixin(MonadMixin): + def mixin_init(monad) -> None: ... + attr_hour: Incomplete + attr_minute: Incomplete + attr_second: Incomplete + +class TimedeltaMixin(MonadMixin): + def mixin_init(monad) -> None: ... + +class DatetimeMixin(DateMixin): + def mixin_init(monad) -> None: ... + def call_date(monad): ... + attr_hour: Incomplete + attr_minute: Incomplete + attr_second: Incomplete + def __add__(monad, other): ... + def __sub__(monad, other): ... + +def make_string_binop(op, sqlop): ... +def make_string_func(sqlop): ... + +class StringMixin(MonadMixin): + def mixin_init(monad) -> None: ... + __add__: Incomplete + def __getitem__(monad, index): ... + def negate(monad): ... + def nonzero(monad): ... + def len(monad): ... + def contains(monad, item, not_in: bool = False): ... + call_upper: Incomplete + call_lower: Incomplete + def call_startswith(monad, arg): ... + def call_endswith(monad, arg): ... + def strip(monad, chars, strip_type): ... + def call_strip(monad, chars=None): ... + def call_lstrip(monad, chars=None): ... + def call_rstrip(monad, chars=None): ... + +class JsonMixin: + disable_distinct: ClassVar[bool] + disable_ordering: ClassVar[bool] + def mixin_init(monad) -> None: ... + def get_path(monad): ... + def __getitem__(monad, key): ... + def contains(monad, key, not_in: bool = False): ... + def __or__(monad, other): ... + def len(monad): ... + def cast_from_json(monad, type): ... + def nonzero(monad): ... 
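
JsonMixin and the Json* monads below cover Pony's JSON attribute support, where indexing a `Json` attribute inside a query becomes a JSON path lookup. A hedged sketch of the kind of query these classes translate (illustrative only; entity names are invented and an SQLite backend with JSON support is assumed):

    from pony.orm import Database, Json, Required, db_session, select

    db = Database("sqlite", ":memory:")

    class Product(db.Entity):
        name = Required(str)
        info = Required(Json)

    db.generate_mapping(create_tables=True)

    with db_session:
        Product(name="Phone", info={"os": {"name": "Android", "version": "8.1"}})
        # Each [...] lookup on the Json attribute goes through __getitem__ and
        # get_path() and ends up as a JSON path expression in the SQL.
        androids = select(p for p in Product if p.info["os"]["name"] == "Android")[:]
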
+ +class ArrayMixin(MonadMixin): + def contains(monad, key, not_in: bool = False): ... + def len(monad): ... + def nonzero(monad): ... + def __getitem__(monad, index): ... + +class ObjectMixin(MonadMixin): + def mixin_init(monad) -> None: ... + def negate(monad): ... + def nonzero(monad): ... + def getattr(monad, attrname): ... + def requires_distinct(monad, joined: bool = False): ... + +class ObjectIterMonad(ObjectMixin, Monad): + def __init__(monad, tableref, entity) -> None: ... + def getsql(monad, sqlquery=None): ... + def requires_distinct(monad, joined: bool = False): ... + +class AttrMonad(Monad): + @staticmethod + def new(parent, attr, *args, **kwargs) -> AttrMonad: ... + def __new__(cls, parent, attr): ... + def __init__(monad, parent, attr) -> None: ... + def getsql(monad, sqlquery=None): ... + +class ObjectAttrMonad(ObjectMixin, AttrMonad): + def __init__(monad, parent, attr) -> None: ... + +class StringAttrMonad(StringMixin, AttrMonad): ... +class NumericAttrMonad(NumericMixin, AttrMonad): ... +class DateAttrMonad(DateMixin, AttrMonad): ... +class TimeAttrMonad(TimeMixin, AttrMonad): ... +class TimedeltaAttrMonad(TimedeltaMixin, AttrMonad): ... +class DatetimeAttrMonad(DatetimeMixin, AttrMonad): ... +class BufferAttrMonad(BufferMixin, AttrMonad): ... +class UuidAttrMonad(UuidMixin, AttrMonad): ... +class JsonAttrMonad(JsonMixin, AttrMonad): ... +class ArrayAttrMonad(ArrayMixin, AttrMonad): ... + +class ParamMonad(Monad): + @staticmethod + def new(t, paramkey) -> ParamMonad: ... + def __new__(cls, *args, **kwargs): ... + def __init__(monad, t, paramkey) -> None: ... + def getsql(monad, sqlquery=None): ... + +class ObjectParamMonad(ObjectMixin, ParamMonad): + def __init__(monad, entity, paramkey) -> None: ... + def getsql(monad, sqlquery=None): ... + def requires_distinct(monad, joined: bool = False) -> None: ... + +class StringParamMonad(StringMixin, ParamMonad): ... +class NumericParamMonad(NumericMixin, ParamMonad): ... +class DateParamMonad(DateMixin, ParamMonad): ... +class TimeParamMonad(TimeMixin, ParamMonad): ... +class TimedeltaParamMonad(TimedeltaMixin, ParamMonad): ... +class DatetimeParamMonad(DatetimeMixin, ParamMonad): ... +class BufferParamMonad(BufferMixin, ParamMonad): ... +class UuidParamMonad(UuidMixin, ParamMonad): ... + +class ArrayParamMonad(ArrayMixin, ParamMonad): + def __init__(monad, t, paramkey, list_monad=None) -> None: ... + def contains(monad, key, not_in: bool = False): ... + +class JsonParamMonad(JsonMixin, ParamMonad): + def getsql(monad, sqlquery=None): ... + +class ExprMonad(Monad): + @staticmethod + def new(t, sql, nullable: bool = True) -> ExprMonad: ... + def __new__(cls, *args, **kwargs): ... + def __init__(monad, type, sql, nullable: bool = True) -> None: ... + def getsql(monad, sqlquery=None): ... + +class ObjectExprMonad(ObjectMixin, ExprMonad): + def getsql(monad, sqlquery=None): ... + +class StringExprMonad(StringMixin, ExprMonad): ... +class NumericExprMonad(NumericMixin, ExprMonad): ... +class DateExprMonad(DateMixin, ExprMonad): ... +class TimeExprMonad(TimeMixin, ExprMonad): ... +class TimedeltaExprMonad(TimedeltaMixin, ExprMonad): ... +class DatetimeExprMonad(DatetimeMixin, ExprMonad): ... +class JsonExprMonad(JsonMixin, ExprMonad): ... +class ArrayExprMonad(ArrayMixin, ExprMonad): ... + +class JsonItemMonad(JsonMixin, Monad): + def __init__(monad, parent, key) -> None: ... + def get_path(monad): ... + def to_int(monad): ... + def to_str(monad): ... + def to_real(monad): ... + def cast_from_json(monad, type): ... 
+ def getsql(monad): ... + +class ConstMonad(Monad): + @staticmethod + def new(value) -> ConstMonad: ... + def __new__(cls, value): ... + def __init__(monad, value) -> None: ... + def getsql(monad, sqlquery=None): ... + +class NoneMonad(ConstMonad): + type = NoneType + def __new__(cls, value=None): ... + def __init__(monad, value=None) -> None: ... + def cmp(monad, op, monad2): ... + def contains(monad, item, not_in: bool = False): ... + def nonzero(monad): ... + def negate(monad): ... + def getattr(monad, attrname): ... + def len(monad): ... + def count(monad, distinct=None): ... + def aggregate(monad, func_name, distinct=None, sep=None): ... + def __call__(monad, *args, **kwargs): ... + def __getitem__(monad, key): ... + def __add__(monad, monad2): ... + def __sub__(monad, monad2): ... + def __mul__(monad, monad2): ... + def __truediv__(monad, monad2): ... + def __floordiv__(monad, monad2): ... + def __pow__(monad, monad2): ... + def __neg__(monad): ... + def __or__(monad, monad2): ... + def __and__(monad, monad2): ... + def __xor__(monad, monad2): ... + def abs(monad): ... + def to_int(monad): ... + def to_str(monad): ... + def to_real(monad): ... + +class EllipsisMonad(ConstMonad): ... + +class StringConstMonad(StringMixin, ConstMonad): + def len(monad): ... + +class JsonConstMonad(JsonMixin, ConstMonad): ... +class BufferConstMonad(BufferMixin, ConstMonad): ... +class NumericConstMonad(NumericMixin, ConstMonad): ... +class DateConstMonad(DateMixin, ConstMonad): ... +class TimeConstMonad(TimeMixin, ConstMonad): ... +class TimedeltaConstMonad(TimedeltaMixin, ConstMonad): ... +class DatetimeConstMonad(DatetimeMixin, ConstMonad): ... + +class BoolMonad(Monad): + def __init__(monad, nullable: bool = True) -> None: ... + def nonzero(monad): ... + +sql_negation: dict[str, str] + +class BoolExprMonad(BoolMonad): + def __init__(monad, sql, nullable: bool = True) -> None: ... + def getsql(monad, sqlquery=None): ... + def negate(monad): ... + +cmp_ops: dict[str, str] +cmp_negate: dict[str, str] + +class CmpMonad(BoolMonad): + EQ: str + NE: str + def __init__(monad, op: str, left, right) -> None: ... + def negate(monad): ... + def getsql(monad, sqlquery=None): ... + +class LogicalBinOpMonad(BoolMonad): + def __init__(monad, operands) -> None: ... + def getsql(monad, sqlquery=None): ... + +class AndMonad(LogicalBinOpMonad): + binop: str + +class OrMonad(LogicalBinOpMonad): + binop: str + +class NotMonad(BoolMonad): + def __init__(monad, operand) -> None: ... + def negate(monad): ... + def getsql(monad, sqlquery=None): ... + +class HybridFuncMonad(Monad): + def __init__(monad, func_type, func_name, *params) -> None: ... + def __call__(monad, *args, **kwargs): ... + +class HybridMethodMonad(HybridFuncMonad): + def __init__(monad, parent, attrname, func) -> None: ... + +registered_functions: dict[types.FunctionType, type[FuncMonad]] + +class FuncMonadMeta(MonadMeta): + def __new__(meta, cls_name: str, bases: tuple[type, ...], cls_dict: dict[str, Any]): ... + +class FuncMonad(Monad, metaclass=FuncMonadMeta): + def __call__(monad, *args, **kwargs): ... + +def get_classes(classinfo) -> Generator[Incomplete]: ... + +class FuncIsinstanceMonad(FuncMonad): + func = isinstance + def call(monad, obj, classinfo): ... + +class FuncBufferMonad(FuncMonad): + func: type[bytes] + def call(monad, source, encoding=None, errors=None): ... + +class FuncBoolMonad(FuncMonad): + func: type[bool] + def call(monad, x): ... + +class FuncIntMonad(FuncMonad): + func: type[int] + def call(monad, x): ... 
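
The FuncMonad subclasses here and below each bind one Python callable via their `func` attribute and are collected in `registered_functions`; this is how built-ins such as `bool`, `int`, `len` and the pony aggregates can appear inside a query body and still be translated to SQL instead of being called in Python. A hedged usage sketch (not part of the stubs; the entity is invented for illustration):

    from pony.orm import Database, Required, avg, count, db_session, select

    db = Database("sqlite", ":memory:")

    class Product(db.Entity):
        name = Required(str)
        price = Required(float)

    db.generate_mapping(create_tables=True)

    with db_session:
        n_products = count(p for p in Product)      # dispatched via the count FuncMonad
        mean_price = avg(p.price for p in Product)  # dispatched via the avg FuncMonad
        short = select(p for p in Product if len(p.name) < 10)[:]  # len() via FuncLenMonad
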
+ +class FuncStrMonad(FuncMonad): + func: type[str] + def call(monad, x): ... + +class FuncFloatMonad(FuncMonad): + func: type[float] + def call(monad, x): ... + +class FuncDecimalMonad(FuncMonad): + func: type[Decimal] + def call(monad, x): ... + +class FuncDateMonad(FuncMonad): + func: type[date] + def call(monad, year, month, day): ... + def call_today(monad): ... + +class FuncTimeMonad(FuncMonad): + func: type[time] + def call(monad, *args): ... + +class FuncTimedeltaMonad(FuncMonad): + func: type[timedelta] + def call(monad, days=None, seconds=None, microseconds=None, milliseconds=None, minutes=None, hours=None, weeks=None): ... + +class FuncDatetimeMonad(FuncDateMonad): + func: type[datetime] + def call(monad, year, month, day, hour=None, minute=None, second=None, microsecond=None): ... + def call_now(monad): ... + +class FuncBetweenMonad(FuncMonad): + func = between + def call(monad, x, a, b): ... + +class FuncConcatMonad(FuncMonad): + func = concat + def call(monad, *args): ... + +class FuncLenMonad(FuncMonad): + func = len + def call(monad, x): ... + +class FuncGetattrMonad(FuncMonad): + func = getattr + def call(monad, obj_monad, name_monad): ... + +class FuncRawSQLMonad(FuncMonad): + func = raw_sql + def call(monad, *args) -> None: ... + +class FuncCountMonad(FuncMonad): + func: Incomplete + def call(monad, x=None, distinct=None): ... + +class FuncAbsMonad(FuncMonad): + func = abs + def call(monad, x): ... + +class FuncSumMonad(FuncMonad): + func: Incomplete + def call(monad, x, distinct=None): ... + +class FuncAvgMonad(FuncMonad): + func: Incomplete + def call(monad, x, distinct=None): ... + +class FuncGroupConcatMonad(FuncMonad): + func: Incomplete + def call(monad, x, sep=None, distinct=None): ... + +class FuncCoalesceMonad(FuncMonad): + func = coalesce + def call(monad, *args): ... + +class FuncDistinctMonad(FuncMonad): + func: Incomplete + def call(monad, x): ... + +class FuncMinMonad(FuncMonad): + func: Incomplete + def call(monad, *args): ... + +class FuncMaxMonad(FuncMonad): + func: Incomplete + def call(monad, *args): ... + +def minmax(monad, sqlop, *args): ... + +class FuncSelectMonad(FuncMonad): + func = core.select + def call(monad, queryset): ... + +class FuncExistsMonad(FuncMonad): + func = core.exists + def call(monad, arg): ... + +class FuncDescMonad(FuncMonad): + func = core.desc + def call(monad, expr): ... + +class DescMonad(Monad): + def __init__(monad, expr) -> None: ... + def getsql(monad): ... + +class JoinMonad(Monad): + def __init__(monad, type) -> None: ... + def __call__(monad, x): ... + +class FuncRandomMonad(FuncMonad): + func = random + def __init__(monad, type) -> None: ... + def __call__(monad): ... + +class SetMixin(MonadMixin): + forced_distinct: bool + def call_distinct(monad): ... + +def make_attrset_binop(op, sqlop): ... + +class AttrSetMonad(SetMixin, Monad): + def __init__(monad, parent, attr) -> None: ... + def cmp(monad, op, monad2) -> None: ... + def contains(monad, item, not_in: bool = False): ... + def getattr(monad, name): ... + def call_select(monad): ... + call_filter = call_select + def call_exists(monad): ... + def requires_distinct(monad, joined: bool = False, for_count: bool = False): ... + def count(monad, distinct=None): ... + len = count + def aggregate(monad, func_name, distinct=None, sep=None): ... + def nonzero(monad): ... + def negate(monad): ... + call_is_empty = negate + def make_tableref(monad, sqlquery): ... + def make_expr_list(monad): ... + def getsql(monad, sqlquery=None): ... 
+ __add__: Incomplete + __sub__: Incomplete + __mul__: Incomplete + __truediv__: Incomplete + __floordiv__: Incomplete + +def make_numericset_binop(op, sqlop): ... + +class NumericSetExprMonad(SetMixin, Monad): + def __init__(monad, op, sqlop, left, right) -> None: ... + def aggregate(monad, func_name, distinct=None, sep=None): ... + def getsql(monad, sqlquery=None): ... + __add__: Incomplete + __sub__: Incomplete + __mul__: Incomplete + __truediv__: Incomplete + __floordiv__: Incomplete + +class QuerySetMonad(SetMixin, Monad): + nogroup: bool + def __init__(monad, subtranslator) -> None: ... + def to_single_cell_value(monad): ... + def requires_distinct(monad, joined: bool = False) -> None: ... + def call_limit(monad, limit=None, offset=None): ... + def contains(monad, item, not_in: bool = False): ... + def nonzero(monad): ... + def negate(monad): ... + def count(monad, distinct=None): ... + len = count + def aggregate(monad, func_name, distinct=None, sep=None): ... + def call_count(monad, distinct=None): ... + def call_sum(monad, distinct=None): ... + def call_min(monad): ... + def call_max(monad): ... + def call_avg(monad, distinct=None): ... + def call_group_concat(monad, sep=None, distinct=None): ... + def getsql(monad): ... + +def find_or_create_having_ast(sections: list[Sequence[str]]): ... diff --git a/stubs/pony/pony/py23compat.pyi b/stubs/pony/pony/py23compat.pyi new file mode 100644 index 000000000000..0854571aa135 --- /dev/null +++ b/stubs/pony/pony/py23compat.pyi @@ -0,0 +1,13 @@ +PYPY: bool +PY36: bool +PY37: bool +PY38: bool +PY39: bool +PY310: bool +PY311: bool +PY312: bool +unicode = str +buffer = bytes +int_types: tuple[type[int]] + +def cmp(a, b): ... diff --git a/stubs/pony/pony/thirdparty/__init__.pyi b/stubs/pony/pony/thirdparty/__init__.pyi new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/stubs/pony/pony/thirdparty/decorator.pyi b/stubs/pony/pony/thirdparty/decorator.pyi new file mode 100644 index 000000000000..1f521275e4a8 --- /dev/null +++ b/stubs/pony/pony/thirdparty/decorator.pyi @@ -0,0 +1,31 @@ +import re +from _typeshed import Incomplete +from collections.abc import Callable +from inspect import getfullargspec as getfullargspec +from typing import Final + +__version__: Final[str] +__all__ = ["decorator", "FunctionMaker", "contextmanager"] + +def get_init(cls: object) -> Callable[..., None]: ... + +DEF: re.Pattern[str] + +class FunctionMaker: + shortsignature: Incomplete + name: Incomplete + doc: Incomplete + module: Incomplete + annotations: Incomplete + signature: Incomplete + dict: Incomplete + defaults: Incomplete + def __init__(self, func=None, name=None, signature=None, defaults=None, doc=None, module=None, funcdict=None) -> None: ... + def update(self, func, **kw) -> None: ... + def make(self, src_templ, evaldict=None, addsource: bool = False, **attrs): ... + @classmethod + def create(cls, obj, body, evaldict, defaults=None, doc=None, module=None, addsource: bool = True, **attrs): ... + +def decorator(caller, func=None): ... 
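
pony.thirdparty.decorator is a vendored copy of the `decorator` package, and the stub mirrors its small public surface (`FunctionMaker`, `decorator`, `contextmanager`). An illustrative, hedged sketch of the usual way `decorator()` is applied (the wrapped functions here are invented for the example):

    from pony.thirdparty.decorator import decorator

    @decorator
    def trace(func, *args, **kwargs):
        # The caller receives the wrapped function plus the call arguments;
        # FunctionMaker is what preserves the original signature on the wrapper.
        print("calling", func.__name__)
        return func(*args, **kwargs)

    @trace
    def add(a, b):
        return a + b

    add(1, 2)  # prints "calling add" and returns 3
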
+ +contextmanager: Incomplete diff --git a/stubs/pony/pony/utils/__init__.pyi b/stubs/pony/pony/utils/__init__.pyi new file mode 100644 index 000000000000..f08d95e954ed --- /dev/null +++ b/stubs/pony/pony/utils/__init__.pyi @@ -0,0 +1,2 @@ +from .properties import * +from .utils import * diff --git a/stubs/pony/pony/utils/properties.pyi b/stubs/pony/pony/utils/properties.pyi new file mode 100644 index 000000000000..912a26d34900 --- /dev/null +++ b/stubs/pony/pony/utils/properties.pyi @@ -0,0 +1,16 @@ +from _typeshed import Incomplete + +class cached_property: + func: Incomplete + def __init__(self, func) -> None: ... + def __get__(self, obj, cls): ... + +class class_property: + func: Incomplete + def __init__(self, func) -> None: ... + def __get__(self, instance, cls): ... + +class class_cached_property: + func: Incomplete + def __init__(self, func) -> None: ... + def __get__(self, obj, cls): ... diff --git a/stubs/pony/pony/utils/utils.pyi b/stubs/pony/pony/utils/utils.pyi new file mode 100644 index 000000000000..79f9e6cb83b0 --- /dev/null +++ b/stubs/pony/pony/utils/utils.pyi @@ -0,0 +1,86 @@ +import ast +import io +import re +from _typeshed import Incomplete, Unused +from collections.abc import Callable, Iterable +from datetime import datetime +from threading import local as _localbase +from types import CodeType, FunctionType, TracebackType +from typing import Any, NoReturn, overload + +localbase = _localbase + +class PonyDeprecationWarning(DeprecationWarning): ... + +def deprecated(stacklevel: int, message: str) -> None: ... +def decorator(caller, func=None): ... +def decorator_with_params(dec): ... +def cut_traceback(func): ... + +cut_traceback_depth: int + +@overload +def reraise(exc_type: Unused, exc: None, tb: TracebackType | None) -> None: ... +@overload +def reraise(exc_type: Unused, exc: BaseException, tb: TracebackType | None) -> NoReturn: ... +def throw(exc_type: Exception | Callable[..., Exception], *args, **kwargs) -> NoReturn: ... +def truncate_repr(s: object, max_len: int = 100) -> str: ... + +codeobjects: dict[int, CodeType] + +def get_codeobject_id(codeobject: CodeType) -> int: ... + +lambda_args_cache: dict[int | ast.Lambda, list[str]] + +def get_lambda_args(func: FunctionType | ast.Lambda) -> list[str]: ... +def error_method(*args: Unused, **kwargs: Unused) -> NoReturn: ... +def is_ident(string: str) -> bool: ... +def split_name(name: str) -> list[str]: ... +def uppercase_name(name: str) -> str: ... +def lowercase_name(name: str) -> str: ... +def camelcase_name(name: str) -> str: ... +def mixedcase_name(name: str) -> str: ... +def import_module(name: str): ... +def is_absolute_path(filename: str) -> bool: ... +def absolutize_path(filename: str, frame_depth: int) -> str: ... +def current_timestamp() -> str: ... +def datetime2timestamp(d: datetime) -> str: ... +def timestamp2datetime(t: str) -> datetime: ... + +expr1_re: re.Pattern[str] +expr2_re: re.Pattern[str] +expr3_re: re.Pattern[str] + +def parse_expr(s: str, pos: int = 0) -> tuple[str, bool]: ... +def tostring(x): ... +def strjoin( + sep: str, strings: Iterable[str], source_encoding: str = "ascii", dest_encoding: str | None = None +) -> str | bytes: ... +def count(*args, **kwargs): ... +def avg(iter: Iterable[float | None]) -> float | None: ... +@overload +def group_concat(items: None, sep: str = ",") -> None: ... +@overload +def group_concat(items: Iterable[object], sep: str = ",") -> str: ... +def coalesce(*args: Any) -> Any: ... +def distinct(iter): ... +def concat(*args) -> str: ... 
+def between(x: float, a: float, b: float) -> bool: ... +def is_utf8(encoding: str) -> bool: ... +def pickle_ast(val): ... +def unpickle_ast(pickled: io.BytesIO): ... +def copy_ast(tree): ... + +class HashableDict(dict[Incomplete, Incomplete]): + def __hash__(self) -> int: ... # type: ignore[override] + def __deepcopy__(self, memo): ... + __setitem__: Incomplete + __delitem__: Incomplete + clear: Incomplete + pop: Incomplete + popitem: Incomplete + setdefault: Incomplete + update: Incomplete + +def deref_proxy(value): ... +def deduplicate(value, deduplication_cache): ...
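
The helpers in pony/utils are small plain-Python utilities that the ORM also recognises inside queries. A hedged sketch of their standalone behaviour, assuming the implementations match their names and the signatures stubbed above (the `Config` class is invented for illustration):

    from pony.utils import between, coalesce, concat, group_concat
    from pony.utils.properties import cached_property

    between(5, 1, 10)                        # True: checks a <= x <= b
    coalesce(None, None, "default")          # "default": first non-None argument
    concat("id-", 42)                        # "id-42": stringified concatenation
    group_concat(["a", "b", "c"], sep="-")   # "a-b-c"; returns None for None input

    class Config:
        @cached_property
        def settings(self):
            # computed on first access, then cached on the instance
            return {"debug": True}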