resolve merge conflicts

This commit is contained in:
collerek
2021-10-09 16:30:35 +02:00
62 changed files with 2797 additions and 451 deletions

View File

@ -1,5 +1,5 @@
[flake8]
ignore = ANN101, ANN102, W503, S101
ignore = ANN101, ANN102, W503, S101, CFQ004
max-complexity = 8
max-line-length = 88
import-order-style = pycharm

View File

@ -20,12 +20,13 @@ jobs:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine
python -m pip install --upgrade poetry
poetry install --no-dev
env:
POETRY_VIRTUALENVS_CREATE: false
- name: Build and publish
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }}
POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
python setup.py sdist bdist_wheel
twine upload dist/*
poetry publish --build

View File

@ -19,7 +19,6 @@ jobs:
matrix:
python-version: [3.6, 3.7, 3.8, 3.9]
fail-fast: false
services:
mysql:
image: mysql:5.7
@ -50,8 +49,10 @@ jobs:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install pip==21.0.1
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
python -m pip install poetry==1.1.11
poetry install
env:
POETRY_VIRTUALENVS_CREATE: false
- name: Run mysql
env:
DATABASE_URL: "mysql://username:password@127.0.0.1:3306/testsuite"
@ -64,7 +65,7 @@ jobs:
env:
DATABASE_URL: "sqlite:///testsuite"
run: bash scripts/test.sh
- run: mypy --config-file mypy.ini ormar tests
- run: mypy ormar tests
- name: Upload coverage
uses: codecov/codecov-action@v1
- name: Test & publish code coverage

View File

@ -22,38 +22,33 @@ It should be quite straight forward to get started and create a Pull Request.
To make contributing as easy and fast as possible, you'll want to run tests and linting locally.
You'll need to have **python 3.6**, **3.7**, or **3.8**, **virtualenv**, and **git** installed.
You'll need to have **python 3.6.2**, **3.7**, or **3.8**, **poetry**, and **git** installed.
```bash
# 1. clone your fork and cd into the repo directory
git clone git@github.com:<your username>/ormar.git
cd ormar
# 2. Set up a virtualenv for running tests
virtualenv -p `which python3.7` env
source env/bin/activate
# (or however you prefer to setup a python environment, 3.6 will work too)
# 2. Install ormar, dependencies and test dependencies
poetry install -E dev
# 3. Install ormar, dependencies and test dependencies
pip install -r requirements.txt
# 4. Checkout a new branch and make your changes
# 3. Checkout a new branch and make your changes
git checkout -b my-new-feature-branch
# make your changes...
# 5. Formatting and linting
# 4. Formatting and linting
# ormar uses black for formatting, flake8 for linting and mypy for type hints check
# run all of the following as all those calls will be run on travis after every push
black ormar tests
flake8 ormar
mypy --config-file mypy.ini ormar tests
mypy ormar tests
# 6. Run tests
# 5. Run tests
# on localhost all tests are run against sqlite backend
# rest of the backends will be checked after push
pytest -svv --cov=ormar --cov=tests --cov-fail-under=100 --cov-report=term-missing
# 7. Build documentation
# 6. Build documentation
mkdocs build
# if you have changed the documentation make sure it builds successfully
# you can also use `mkdocs serve` to serve the documentation at localhost:8000

View File

@ -17,7 +17,7 @@ divided into categories according to the function they fulfill.
Note that some functions/methods are in multiple categories.
For complicity also Models and relations methods are listed.
For completeness, Model and relation methods are listed.
To read more about any specific section or function please refer to the details subpage.

View File

@ -109,6 +109,9 @@ assert all_cars[0].manufacturer.founded == 1937
Mandatory fields cannot be excluded as it will raise `ValidationError`, to
exclude a field it has to be nullable.
The `values()` method can be used to exclude mandatory fields, though data will
be returned as a `dict`.
You cannot exclude mandatory model columns - `manufacturer__name` in this example.
```python
@ -176,7 +179,7 @@ await Car.objects.select_related('manufacturer').fields({'id': ...,
So operations like `filter()`, `select_related()`, `limit()` and `offset()` etc. can be chained.
Something like `Track.object.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
Something like `Track.objects.select_related("album").filter(album__name="Malibu").offset(1).limit(1).all()`
## exclude_fields
@ -274,6 +277,9 @@ await Car.objects.select_related('manufacturer').exclude_fields([{'company': {'n
Mandatory fields cannot be excluded as it will raise `ValidationError`, to
exclude a field it has to be nullable.
The `values()` method can be used to exclude mandatory fields, though data will
be returned as a `dict`.
!!!tip
Pk column cannot be excluded - it's always auto added even if explicitly
excluded.

View File

@ -1,10 +0,0 @@
[mypy]
python_version = 3.8
plugins = pydantic.mypy
[mypy-sqlalchemy.*]
ignore_missing_imports = True
[mypy-tests.test_model_definition.*]
ignore_errors = True

View File

@ -19,6 +19,10 @@ snakes, and ormar(e) in italian which means cabinet.
And what's a better name for python ORM than snakes cabinet :)
"""
try:
from importlib.metadata import version # type: ignore
except ImportError: # pragma: no cover
from importlib_metadata import version # type: ignore
from ormar.protocols import QuerySetProtocol, RelationProtocol # noqa: I100
from ormar.decorators import ( # noqa: I100
post_delete,
@ -64,7 +68,7 @@ from ormar.fields import (
UUID,
UniqueColumns,
) # noqa: I100
from ormar.models import ExcludableItems, Model, Extra
from ormar.models import ExcludableItems, Extra, Model
from ormar.models.metaclass import ModelMeta
from ormar.queryset import OrderAction, QuerySet, and_, or_
from ormar.relations import RelationType
@ -78,7 +82,7 @@ class UndefinedType: # pragma no cover
Undefined = UndefinedType()
__version__ = "0.10.20"
__version__ = version("ormar")
__all__ = [
"Integer",
"BigInteger",

View File

@ -41,7 +41,7 @@ def receiver(
return _decorator
def post_save(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable:
def post_save(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
"""
Connect given function to all senders for post_save signal.
@ -54,7 +54,7 @@ def post_save(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable:
return receiver(signal="post_save", senders=senders)
def post_update(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable:
def post_update(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
"""
Connect given function to all senders for post_update signal.
@ -67,7 +67,7 @@ def post_update(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable
return receiver(signal="post_update", senders=senders)
def post_delete(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable:
def post_delete(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
"""
Connect given function to all senders for post_delete signal.
@ -80,7 +80,7 @@ def post_delete(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable
return receiver(signal="post_delete", senders=senders)
def pre_save(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable:
def pre_save(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
"""
Connect given function to all senders for pre_save signal.

View File

@ -5,7 +5,7 @@ Gathers all exceptions thrown by ormar.
class AsyncOrmException(Exception):
"""
Base ormar Exception
Base ormar Exception
"""
pass
@ -13,13 +13,13 @@ class AsyncOrmException(Exception):
class ModelDefinitionError(AsyncOrmException):
"""
Raised for errors related to the model definition itself:
Raised for errors related to the model definition itself:
* setting @property_field on method with arguments other than func(self)
* defining a Field without required parameters
* defining a model with more than one primary_key
* defining a model without primary_key
* setting primary_key column as pydantic_only
* setting @property_field on method with arguments other than func(self)
* defining a Field without required parameters
* defining a model with more than one primary_key
* defining a model without primary_key
* setting primary_key column as pydantic_only
"""
pass
@ -27,7 +27,7 @@ class ModelDefinitionError(AsyncOrmException):
class ModelError(AsyncOrmException):
"""
Raised for initialization of model with non-existing field keyword.
Raised for initialization of model with non-existing field keyword.
"""
pass
@ -35,7 +35,7 @@ class ModelError(AsyncOrmException):
class NoMatch(AsyncOrmException):
"""
Raised for database queries that has no matching result (empty result).
Raised for database queries that has no matching result (empty result).
"""
pass
@ -43,8 +43,8 @@ class NoMatch(AsyncOrmException):
class MultipleMatches(AsyncOrmException):
"""
Raised for database queries that should return one row (i.e. get, first etc.)
but has multiple matching results in response.
Raised for database queries that should return one row (i.e. get, first etc.)
but has multiple matching results in response.
"""
pass
@ -52,11 +52,11 @@ class MultipleMatches(AsyncOrmException):
class QueryDefinitionError(AsyncOrmException):
"""
Raised for errors in query definition:
Raised for errors in query definition:
* using contains or icontains filter with instance of the Model
* using Queryset.update() without filter and setting each flag to True
* using Queryset.delete() without filter and setting each flag to True
* using contains or icontains filter with instance of the Model
* using Queryset.update() without filter and setting each flag to True
* using Queryset.delete() without filter and setting each flag to True
"""
pass
@ -68,8 +68,8 @@ class RelationshipInstanceError(AsyncOrmException):
class ModelPersistenceError(AsyncOrmException):
"""
Raised for update of models without primary_key set (cannot retrieve from db)
or for saving a model with relation to unsaved model (cannot extract fk value).
Raised for update of models without primary_key set (cannot retrieve from db)
or for saving a model with relation to unsaved model (cannot extract fk value).
"""
pass
@ -77,7 +77,7 @@ class ModelPersistenceError(AsyncOrmException):
class SignalDefinitionError(AsyncOrmException):
"""
Raised when non callable receiver is passed as signal callback.
Raised when non callable receiver is passed as signal callback.
"""
pass

View File

@ -92,7 +92,7 @@ def create_dummy_model(
def populate_fk_params_based_on_to_model(
to: Type["T"], nullable: bool, onupdate: str = None, ondelete: str = None,
to: Type["T"], nullable: bool, onupdate: str = None, ondelete: str = None
) -> Tuple[Any, List, Any]:
"""
Based on target to model to which relation leads to populates the type of the
@ -182,7 +182,7 @@ def ForeignKey(to: ForwardRef, **kwargs: Any) -> "Model": # pragma: no cover
...
def ForeignKey( # noqa CFQ002
def ForeignKey( # type: ignore # noqa CFQ002
to: "ToType",
*,
name: str = None,
@ -347,9 +347,7 @@ class ForeignKeyField(BaseField):
"""
if self.to.__class__ == ForwardRef:
self.to = evaluate_forwardref(
self.to, # type: ignore
globalns,
localns or None,
self.to, globalns, localns or None # type: ignore
)
(
self.__type__,
@ -363,7 +361,7 @@ class ForeignKeyField(BaseField):
)
def _extract_model_from_sequence(
self, value: List, child: "Model", to_register: bool,
self, value: List, child: "Model", to_register: bool
) -> List["Model"]:
"""
Takes a list of Models and registers them on parent.
@ -382,13 +380,13 @@ class ForeignKeyField(BaseField):
"""
return [
self.expand_relationship( # type: ignore
value=val, child=child, to_register=to_register,
value=val, child=child, to_register=to_register
)
for val in value
]
def _register_existing_model(
self, value: "Model", child: "Model", to_register: bool,
self, value: "Model", child: "Model", to_register: bool
) -> "Model":
"""
Takes already created instance and registers it for parent.
@ -479,9 +477,7 @@ class ForeignKeyField(BaseField):
:param child: child model
:type child: Model class
"""
model._orm.add(
parent=model, child=child, field=self,
)
model._orm.add(parent=model, child=child, field=self)
def has_unresolved_forward_refs(self) -> bool:
"""

View File

@ -78,7 +78,7 @@ def ManyToMany(to: ForwardRef, **kwargs: Any) -> "RelationProxy": # pragma: no
...
def ManyToMany(
def ManyToMany( # type: ignore
to: "ToType",
through: Optional["ToType"] = None,
*,
@ -223,20 +223,16 @@ class ManyToManyField(ForeignKeyField, ormar.QuerySetProtocol, ormar.RelationPro
"""
if self.to.__class__ == ForwardRef:
self.to = evaluate_forwardref(
self.to, # type: ignore
globalns,
localns or None,
self.to, globalns, localns or None # type: ignore
)
(self.__type__, self.column_type,) = populate_m2m_params_based_on_to_model(
to=self.to, nullable=self.nullable,
(self.__type__, self.column_type) = populate_m2m_params_based_on_to_model(
to=self.to, nullable=self.nullable
)
if self.through.__class__ == ForwardRef:
self.through = evaluate_forwardref(
self.through, # type: ignore
globalns,
localns or None,
self.through, globalns, localns or None # type: ignore
)
forbid_through_relations(self.through)

View File

@ -12,7 +12,7 @@ from ormar.fields.base import BaseField # noqa I101
from ormar.fields.sqlalchemy_encrypted import EncryptBackends
try:
from typing import Literal
from typing import Literal # type: ignore
except ImportError: # pragma: no cover
from typing_extensions import Literal # type: ignore
@ -254,9 +254,7 @@ class Text(ModelFieldFactory, str):
_type = str
_sample = "text"
def __new__( # type: ignore
cls, **kwargs: Any
) -> BaseField:
def __new__(cls, **kwargs: Any) -> BaseField: # type: ignore
kwargs = {
**kwargs,
**{
@ -470,13 +468,13 @@ class JSON(ModelFieldFactory, pydantic.Json):
if TYPE_CHECKING: # pragma: nocover # noqa: C901
@overload
def LargeBinary(
def LargeBinary( # type: ignore
max_length: int, *, represent_as_base64_str: Literal[True], **kwargs: Any
) -> str:
...
@overload
def LargeBinary(
def LargeBinary( # type: ignore
max_length: int, *, represent_as_base64_str: Literal[False], **kwargs: Any
) -> bytes:
...

View File

@ -6,7 +6,7 @@ from typing import Any, Callable, Optional, TYPE_CHECKING, Type, Union
import sqlalchemy.types as types
from pydantic.utils import lenient_issubclass
from sqlalchemy.engine.default import DefaultDialect
from sqlalchemy.engine import Dialect
import ormar # noqa: I100, I202
from ormar import ModelDefinitionError # noqa: I202, I100
@ -146,14 +146,14 @@ class EncryptedString(types.TypeDecorator):
def __repr__(self) -> str: # pragma: nocover
return "TEXT()"
def load_dialect_impl(self, dialect: DefaultDialect) -> Any:
def load_dialect_impl(self, dialect: Dialect) -> Any:
return dialect.type_descriptor(types.TEXT())
def _refresh(self) -> None:
key = self._key() if callable(self._key) else self._key
self.backend._refresh(key)
def process_bind_param(self, value: Any, dialect: DefaultDialect) -> Optional[str]:
def process_bind_param(self, value: Any, dialect: Dialect) -> Optional[str]:
if value is None:
return value
self._refresh()
@ -167,7 +167,7 @@ class EncryptedString(types.TypeDecorator):
encrypted_value = self.backend.encrypt(value)
return encrypted_value
def process_result_value(self, value: Any, dialect: DefaultDialect) -> Any:
def process_result_value(self, value: Any, dialect: Dialect) -> Any:
if value is None:
return value
self._refresh()

View File

@ -2,7 +2,7 @@ import uuid
from typing import Any, Optional
from sqlalchemy import CHAR
from sqlalchemy.engine.default import DefaultDialect
from sqlalchemy.engine import Dialect
from sqlalchemy.types import TypeDecorator
@ -25,22 +25,20 @@ class UUID(TypeDecorator):
return "CHAR(36)"
return "CHAR(32)"
def load_dialect_impl(self, dialect: DefaultDialect) -> Any:
def load_dialect_impl(self, dialect: Dialect) -> Any:
return (
dialect.type_descriptor(CHAR(36))
if self.uuid_format == "string"
else dialect.type_descriptor(CHAR(32))
)
def process_bind_param(
self, value: uuid.UUID, dialect: DefaultDialect
) -> Optional[str]:
def process_bind_param(self, value: uuid.UUID, dialect: Dialect) -> Optional[str]:
if value is None:
return value
return str(value) if self.uuid_format == "string" else "%.32x" % value.int
def process_result_value(
self, value: Optional[str], dialect: DefaultDialect
self, value: Optional[str], dialect: Dialect
) -> Optional[uuid.UUID]:
if value is None:
return value

View File

@ -15,7 +15,7 @@ if TYPE_CHECKING: # pragma no cover
def Through( # noqa CFQ002
to: "ToType", *, name: str = None, related_name: str = None, **kwargs: Any,
to: "ToType", *, name: str = None, related_name: str = None, **kwargs: Any
) -> Any:
"""
Despite a name it's a function that returns constructed ThroughField.

View File

@ -29,7 +29,7 @@ def is_field_an_forward_ref(field: "BaseField") -> bool:
)
def populate_default_options_values(
def populate_default_options_values( # noqa: CCR001
new_model: Type["Model"], model_fields: Dict
) -> None:
"""

View File

@ -64,8 +64,11 @@ def convert_choices_if_needed( # noqa: CCR001
value = value.isoformat() if not isinstance(value, str) else value
choices = [o.isoformat() for o in field.choices]
elif field.__type__ == pydantic.Json:
value = json.dumps(value) if not isinstance(value, str) else value
value = (
json.dumps(value) if not isinstance(value, str) else re_dump_value(value)
)
value = value.decode("utf-8") if isinstance(value, bytes) else value
choices = [re_dump_value(x) for x in field.choices]
elif field.__type__ == uuid.UUID:
value = str(value) if not isinstance(value, str) else value
choices = [str(o) for o in field.choices]
@ -86,6 +89,21 @@ def convert_choices_if_needed( # noqa: CCR001
return value, choices
def re_dump_value(value: str) -> str:
"""
Re-dumps value due to different string representation in orjson and json
:param value: string to re-dump
:type value: str
:return: re-dumped value
:rtype: str
"""
try:
result: Union[str, bytes] = json.dumps(json.loads(value))
except json.JSONDecodeError:
result = value
return result.decode("utf-8") if isinstance(result, bytes) else result
def validate_choices(field: "BaseField", value: Any) -> None:
"""
Validates if given value is in provided choices.

View File

@ -280,7 +280,7 @@ def copy_and_replace_m2m_through_model( # noqa: CFQ002
field.create_default_through_model()
through_class = field.through
new_meta: ormar.ModelMeta = type( # type: ignore
"Meta", (), dict(through_class.Meta.__dict__),
"Meta", (), dict(through_class.Meta.__dict__)
)
copy_name = through_class.__name__ + attrs.get("__name__", "")
copy_through = type(copy_name, (ormar.Model,), {"Meta": new_meta})
@ -566,9 +566,7 @@ class ModelMetaclass(pydantic.main.ModelMetaclass):
attrs, model_fields = extract_from_parents_definition(
base_class=base, curr_class=mcs, attrs=attrs, model_fields=model_fields
)
new_model = super().__new__( # type: ignore
mcs, name, bases, attrs
)
new_model = super().__new__(mcs, name, bases, attrs) # type: ignore
add_cached_properties(new_model)
@ -647,6 +645,6 @@ class ModelMetaclass(pydantic.main.ModelMetaclass):
access_chain=item,
)
return FieldAccessor(
source_model=cast(Type["Model"], self), field=field, access_chain=item,
source_model=cast(Type["Model"], self), field=field, access_chain=item
)
return object.__getattribute__(self, item)

View File

@ -141,7 +141,7 @@ class ExcludableMixin(RelationMixin):
return columns
@classmethod
def _update_excluded_with_related(cls, exclude: Union[Set, Dict, None],) -> Set:
def _update_excluded_with_related(cls, exclude: Union[Set, Dict, None]) -> Set:
"""
Used during generation of the dict().
To avoid cyclical references and max recurrence limit nested models have to

View File

@ -51,7 +51,7 @@ class PrefetchQueryMixin(RelationMixin):
@staticmethod
def get_column_name_for_id_extraction(
parent_model: Type["Model"], reverse: bool, related: str, use_raw: bool,
parent_model: Type["Model"], reverse: bool, related: str, use_raw: bool
) -> str:
"""
Returns name of the column that should be used to extract ids from model.

View File

@ -28,7 +28,7 @@ class PydanticMixin(RelationMixin):
@classmethod
def get_pydantic(
cls, *, include: Union[Set, Dict] = None, exclude: Union[Set, Dict] = None,
cls, *, include: Union[Set, Dict] = None, exclude: Union[Set, Dict] = None
) -> Type[pydantic.BaseModel]:
"""
Returns a pydantic model out of ormar model.

View File

@ -1,12 +1,4 @@
from typing import (
Callable,
Dict,
List,
Optional,
Set,
TYPE_CHECKING,
cast,
)
from typing import Callable, Dict, List, Optional, Set, TYPE_CHECKING, cast
from ormar import BaseField, ForeignKeyField
from ormar.models.traversible import NodeList

View File

@ -1,5 +1,15 @@
import uuid
from typing import Callable, Collection, Dict, List, Optional, Set, TYPE_CHECKING, cast
from typing import (
Any,
Callable,
Collection,
Dict,
List,
Optional,
Set,
TYPE_CHECKING,
cast,
)
import ormar
from ormar.exceptions import ModelPersistenceError
@ -93,7 +103,7 @@ class SavePrepareMixin(RelationMixin, AliasMixin):
if field.__type__ == uuid.UUID and name in model_dict:
parsers = {"string": lambda x: str(x), "hex": lambda x: "%.32x" % x.int}
uuid_format = field.column_type.uuid_format
parser = parsers.get(uuid_format, lambda x: x)
parser: Callable[..., Any] = parsers.get(uuid_format, lambda x: x)
model_dict[name] = parser(model_dict[name])
return model_dict
@ -222,7 +232,7 @@ class SavePrepareMixin(RelationMixin, AliasMixin):
@staticmethod
async def _upsert_through_model(
instance: "Model", previous_model: "Model", relation_field: "ForeignKeyField",
instance: "Model", previous_model: "Model", relation_field: "ForeignKeyField"
) -> None:
"""
Upsert through model for m2m relation.

View File

@ -1,13 +1,4 @@
from typing import (
Any,
Dict,
List,
Optional,
Set,
TYPE_CHECKING,
TypeVar,
Union,
)
from typing import Any, Dict, List, Optional, Set, TYPE_CHECKING, TypeVar, Union
import ormar.queryset # noqa I100
from ormar.exceptions import ModelPersistenceError, NoMatch

View File

@ -1,17 +1,7 @@
from typing import (
Any,
Dict,
List,
Optional,
TYPE_CHECKING,
Tuple,
Type,
Union,
cast,
)
from typing import Any, Dict, List, Optional, TYPE_CHECKING, Tuple, Type, Union, cast
try:
from sqlalchemy.engine.result import ResultProxy
from sqlalchemy.engine.result import ResultProxy # type: ignore
except ImportError: # pragma: no cover
from sqlalchemy.engine.result import Row as ResultProxy # type: ignore
@ -293,7 +283,7 @@ class ModelRow(NewBaseModel):
"""
through_name = cls.Meta.model_fields[related].through.get_name()
through_child = cls._create_through_instance(
row=row, related=related, through_name=through_name, excludable=excludable,
row=row, related=related, through_name=through_name, excludable=excludable
)
if child.__class__ != proxy_source_model:
@ -378,7 +368,7 @@ class ModelRow(NewBaseModel):
:rtype: Dict
"""
selected_columns = cls.own_table_columns(
model=cls, excludable=excludable, alias=table_prefix, use_alias=False,
model=cls, excludable=excludable, alias=table_prefix, use_alias=False
)
column_prefix = table_prefix + "_" if table_prefix else ""

View File

@ -19,17 +19,17 @@ from typing import (
cast,
)
import databases
import pydantic
import sqlalchemy
from ormar.models.utils import Extra
from pydantic import BaseModel
try:
import orjson as json
except ImportError: # pragma: no cover
import json # type: ignore
import databases
import pydantic
import sqlalchemy
from pydantic import BaseModel
import ormar # noqa I100
from ormar.exceptions import ModelError, ModelPersistenceError
@ -158,9 +158,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
# register the columns models after initialization
for related in self.extract_related_names().union(self.extract_through_names()):
model_fields[related].expand_relationship(
new_kwargs.get(related),
self,
to_register=True,
new_kwargs.get(related), self, to_register=True
)
if hasattr(self, "_init_private_attributes"):
@ -224,7 +222,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
f"need to call update_forward_refs()."
)
def _process_kwargs(self, kwargs: Dict) -> Tuple[Dict, Dict]:
def _process_kwargs(self, kwargs: Dict) -> Tuple[Dict, Dict]: # noqa: CCR001
"""
Initializes nested models.
@ -267,11 +265,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
k,
self._convert_json(
k,
model_fields[k].expand_relationship(
v,
self,
to_register=False,
)
model_fields[k].expand_relationship(v, self, to_register=False)
if k in model_fields
else (v if k in pydantic_fields else model_fields[k]),
),
@ -325,8 +320,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
self,
"_orm",
RelationsManager(
related_fields=self.extract_related_fields(),
owner=cast("Model", self),
related_fields=self.extract_related_fields(), owner=cast("Model", self)
),
)
@ -499,9 +493,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
@staticmethod
def _get_not_excluded_fields(
fields: Union[List, Set],
include: Optional[Dict],
exclude: Optional[Dict],
fields: Union[List, Set], include: Optional[Dict], exclude: Optional[Dict]
) -> List:
"""
Returns related field names applying on them include and exclude set.

View File

@ -57,7 +57,7 @@ class FilterAction(QueryAction):
Extracted in order to easily change table prefixes on complex relations.
"""
def __init__(self, filter_str: str, value: Any, model_cls: Type["Model"],) -> None:
def __init__(self, filter_str: str, value: Any, model_cls: Type["Model"]) -> None:
super().__init__(query_str=filter_str, model_cls=model_cls)
self.filter_value = value
self._escape_characters_in_clause()
@ -148,7 +148,7 @@ class FilterAction(QueryAction):
filter_value = self.filter_value
clause = getattr(self.column, op_attr)(filter_value)
clause = self._compile_clause(
clause, modifiers={"escape": "\\" if self.has_escaped_character else None},
clause, modifiers={"escape": "\\" if self.has_escaped_character else None}
)
return clause
@ -170,7 +170,7 @@ class FilterAction(QueryAction):
]
def _compile_clause(
self, clause: sqlalchemy.sql.expression.BinaryExpression, modifiers: Dict,
self, clause: sqlalchemy.sql.expression.BinaryExpression, modifiers: Dict
) -> sqlalchemy.sql.expression.TextClause:
"""
Compiles the clause to str using appropriate database dialect, replace columns

View File

@ -185,7 +185,7 @@ class QueryClause:
"""
def __init__(
self, model_cls: Type["Model"], filter_clauses: List, select_related: List,
self, model_cls: Type["Model"], filter_clauses: List, select_related: List
) -> None:
self._select_related = select_related[:]

View File

@ -1,14 +1,5 @@
from collections import OrderedDict
from typing import (
Any,
Dict,
List,
Optional,
TYPE_CHECKING,
Tuple,
Type,
cast,
)
from typing import Any, Dict, List, Optional, TYPE_CHECKING, Tuple, Type, cast
import sqlalchemy
from sqlalchemy import text
@ -102,9 +93,7 @@ class SqlJoin:
"""
return self.next_model.Meta.table
def _on_clause(
self, previous_alias: str, from_clause: str, to_clause: str,
) -> text:
def _on_clause(self, previous_alias: str, from_clause: str, to_clause: str) -> text:
"""
Receives aliases and names of both ends of the join and combines them
into one text clause used in joins.
@ -140,12 +129,7 @@ class SqlJoin:
self._process_following_joins()
return (
self.used_aliases,
self.select_from,
self.columns,
self.sorted_orders,
)
return (self.used_aliases, self.select_from, self.columns, self.sorted_orders)
def _forward_join(self) -> None:
"""
@ -269,7 +253,7 @@ class SqlJoin:
new_part = target_field.default_target_field_name() # type: ignore
return new_part
def _process_join(self,) -> None: # noqa: CFQ002
def _process_join(self) -> None: # noqa: CFQ002
"""
Resolves to and from column names and table names.
@ -316,7 +300,7 @@ class SqlJoin:
def _set_default_primary_key_order_by(self) -> None:
for order_by in self.next_model.Meta.orders_by:
clause = ormar.OrderAction(
order_str=order_by, model_cls=self.next_model, alias=self.next_alias,
order_str=order_by, model_cls=self.next_model, alias=self.next_alias
)
self.sorted_orders[clause] = clause.get_text_clause()
@ -355,8 +339,7 @@ class SqlJoin:
model = self.target_field.to
else:
alias = self.alias_manager.resolve_relation_alias(
from_model=self.target_field.owner,
relation_name=self.target_field.name,
from_model=self.target_field.owner, relation_name=self.target_field.name
)
model = self.target_field.to

View File

@ -1,13 +1,4 @@
from typing import (
Dict,
List,
Sequence,
Set,
TYPE_CHECKING,
Tuple,
Type,
cast,
)
from typing import Dict, List, Sequence, Set, TYPE_CHECKING, Tuple, Type, cast
import ormar
from ormar.queryset.clause import QueryClause
@ -39,11 +30,11 @@ def sort_models(models: List["Model"], orders_by: Dict) -> List["Model"]:
]
sort_criteria = sort_criteria[::-1]
for criteria in sort_criteria:
key, value = criteria
key_name, value = criteria
if value == "desc":
models.sort(key=lambda x: getattr(x, key), reverse=True)
models.sort(key=lambda x: getattr(x, key_name), reverse=True)
else:
models.sort(key=lambda x: getattr(x, key))
models.sort(key=lambda x: getattr(x, key_name))
return models
@ -192,7 +183,7 @@ class PrefetchQuery:
return list_of_ids
def _extract_required_ids(
self, parent_model: Type["Model"], reverse: bool, related: str,
self, parent_model: Type["Model"], reverse: bool, related: str
) -> Set:
"""
Delegates extraction of the fields to either get ids from raw sql response
@ -210,10 +201,7 @@ class PrefetchQuery:
use_raw = parent_model.get_name() not in self.models
column_name = parent_model.get_column_name_for_id_extraction(
parent_model=parent_model,
reverse=reverse,
related=related,
use_raw=use_raw,
parent_model=parent_model, reverse=reverse, related=related, use_raw=use_raw
)
if use_raw:
@ -263,7 +251,7 @@ class PrefetchQuery:
related=related,
)
qryclause = QueryClause(
model_cls=clause_target, select_related=[], filter_clauses=[],
model_cls=clause_target, select_related=[], filter_clauses=[]
)
kwargs = {f"{filter_column}__in": ids}
filter_clauses, _ = qryclause.prepare_filter(_own_only=False, **kwargs)
@ -271,7 +259,7 @@ class PrefetchQuery:
return []
def _populate_nested_related(
self, model: "Model", prefetch_dict: Dict, orders_by: Dict,
self, model: "Model", prefetch_dict: Dict, orders_by: Dict
) -> "Model":
"""
Populates all related models children of parent model that are
@ -540,7 +528,7 @@ class PrefetchQuery:
)
def _update_already_loaded_rows( # noqa: CFQ002
self, target_field: "BaseField", prefetch_dict: Dict, orders_by: Dict,
self, target_field: "BaseField", prefetch_dict: Dict, orders_by: Dict
) -> None:
"""
Updates models that are already loaded, usually children of children.
@ -598,7 +586,7 @@ class PrefetchQuery:
for row in rows:
field_name = parent_model.get_related_field_name(target_field=target_field)
item = target_model.extract_prefixed_table_columns(
item={}, row=row, table_prefix=table_prefix, excludable=excludable,
item={}, row=row, table_prefix=table_prefix, excludable=excludable
)
item["__excluded__"] = target_model.get_names_to_exclude(
excludable=excludable, alias=exclude_prefix

View File

@ -1,8 +1,9 @@
from collections import OrderedDict
from typing import List, Optional, TYPE_CHECKING, Tuple, Type
from typing import List, Optional, TYPE_CHECKING, Tuple, Type, Union
import sqlalchemy
from sqlalchemy import text
from sqlalchemy import Table, text
from sqlalchemy.sql import Join
import ormar # noqa I100
from ormar.models.helpers.models import group_related_list
@ -41,7 +42,7 @@ class Query:
self.used_aliases: List[str] = []
self.select_from: List[str] = []
self.select_from: Union[Join, Table, List[str]] = []
self.columns = [sqlalchemy.Column]
self.order_columns = order_bys
self.sorted_orders: OrderedDict[OrderAction, text] = OrderedDict()
@ -111,7 +112,7 @@ class Query:
:rtype: sqlalchemy.sql.selectable.Select
"""
self_related_fields = self.model_cls.own_table_columns(
model=self.model_cls, excludable=self.excludable, use_alias=True,
model=self.model_cls, excludable=self.excludable, use_alias=True
)
self.columns = self.model_cls.Meta.alias_manager.prefixed_columns(
"", self.table, self_related_fields

View File

@ -247,7 +247,7 @@ class QuerySet(Generic[T]):
return self.model_meta.table
def build_select_expression(
self, limit: int = None, offset: int = None, order_bys: List = None,
self, limit: int = None, offset: int = None, order_bys: List = None
) -> sqlalchemy.sql.select:
"""
Constructs the actual database query used in the QuerySet.
@ -378,7 +378,7 @@ class QuerySet(Generic[T]):
]
related = sorted(list(set(list(self._select_related) + related)))
return self.rebuild_self(select_related=related,)
return self.rebuild_self(select_related=related)
def select_all(self, follow: bool = False) -> "QuerySet[T]":
"""
@ -404,7 +404,7 @@ class QuerySet(Generic[T]):
relations = list(self.model.extract_related_names())
if follow:
relations = self.model._iterate_related_models()
return self.rebuild_self(select_related=relations,)
return self.rebuild_self(select_related=relations)
def prefetch_related(
self, related: Union[List, str, FieldAccessor]
@ -434,7 +434,7 @@ class QuerySet(Generic[T]):
]
related = list(set(list(self._prefetch_related) + related))
return self.rebuild_self(prefetch_related=related,)
return self.rebuild_self(prefetch_related=related)
def fields(
self, columns: Union[List, str, Set, Dict], _is_exclude: bool = False
@ -490,7 +490,7 @@ class QuerySet(Generic[T]):
is_exclude=_is_exclude,
)
return self.rebuild_self(excludable=excludable,)
return self.rebuild_self(excludable=excludable)
def exclude_fields(self, columns: Union[List, str, Set, Dict]) -> "QuerySet[T]":
"""
@ -564,7 +564,7 @@ class QuerySet(Generic[T]):
]
order_bys = self.order_bys + [x for x in orders_by if x not in self.order_bys]
return self.rebuild_self(order_bys=order_bys,)
return self.rebuild_self(order_bys=order_bys)
async def values(
self,
@ -821,7 +821,7 @@ class QuerySet(Generic[T]):
limit_count = page_size
query_offset = (page - 1) * page_size
return self.rebuild_self(limit_count=limit_count, offset=query_offset,)
return self.rebuild_self(limit_count=limit_count, offset=query_offset)
def limit(self, limit_count: int, limit_raw_sql: bool = None) -> "QuerySet[T]":
"""
@ -838,7 +838,7 @@ class QuerySet(Generic[T]):
:rtype: QuerySet
"""
limit_raw_sql = self.limit_sql_raw if limit_raw_sql is None else limit_raw_sql
return self.rebuild_self(limit_count=limit_count, limit_raw_sql=limit_raw_sql,)
return self.rebuild_self(limit_count=limit_count, limit_raw_sql=limit_raw_sql)
def offset(self, offset: int, limit_raw_sql: bool = None) -> "QuerySet[T]":
"""
@ -855,7 +855,7 @@ class QuerySet(Generic[T]):
:rtype: QuerySet
"""
limit_raw_sql = self.limit_sql_raw if limit_raw_sql is None else limit_raw_sql
return self.rebuild_self(offset=offset, limit_raw_sql=limit_raw_sql,)
return self.rebuild_self(offset=offset, limit_raw_sql=limit_raw_sql)
async def first(self, *args: Any, **kwargs: Any) -> "T":
"""

View File

@ -214,21 +214,22 @@ def extract_nested_models( # noqa: CCR001
follow = [rel for rel in model_type.extract_related_names() if rel in select_dict]
for related in follow:
child = getattr(model, related)
if child:
target_model = model_type.Meta.model_fields[related].to
if isinstance(child, list):
extracted.setdefault(target_model.get_name(), []).extend(child)
if select_dict[related] is not Ellipsis:
for sub_child in child:
extract_nested_models(
sub_child, target_model, select_dict[related], extracted,
)
else:
extracted.setdefault(target_model.get_name(), []).append(child)
if select_dict[related] is not Ellipsis:
if not child:
continue
target_model = model_type.Meta.model_fields[related].to
if isinstance(child, list):
extracted.setdefault(target_model.get_name(), []).extend(child)
if select_dict[related] is not Ellipsis:
for sub_child in child:
extract_nested_models(
child, target_model, select_dict[related], extracted,
sub_child, target_model, select_dict[related], extracted
)
else:
extracted.setdefault(target_model.get_name(), []).append(child)
if select_dict[related] is not Ellipsis:
extract_nested_models(
child, target_model, select_dict[related], extracted
)
def extract_models_to_dict_of_lists(

View File

@ -99,7 +99,7 @@ class AliasManager:
return table.alias(f"{alias}_{table.name}")
def add_relation_type(
self, source_model: Type["Model"], relation_name: str, reverse_name: str = None,
self, source_model: Type["Model"], relation_name: str, reverse_name: str = None
) -> None:
"""
Registers the relations defined in ormar models.

View File

@ -43,7 +43,7 @@ class QuerysetProxy(Generic[T]):
type_: "RelationType",
qryset: "QuerySet[T]" = None,
) -> None:
self.relation: Relation = relation
self.relation: "Relation" = relation
self._queryset: Optional["QuerySet[T]"] = qryset
self.type_: "RelationType" = type_
self._owner: Union[CallableProxyType, "Model"] = self.relation.manager.owner
@ -294,7 +294,7 @@ class QuerysetProxy(Generic[T]):
:type fields: Union[List, str, Set, Dict]
"""
return await self.queryset.values(
fields=fields, exclude_through=exclude_through,
fields=fields, exclude_through=exclude_through
)
async def values_list(
@ -479,8 +479,7 @@ class QuerysetProxy(Generic[T]):
await child.update(**kwargs) # type: ignore
if self.type_ == ormar.RelationType.MULTIPLE and through_kwargs:
await self.update_through_instance(
child=child, # type: ignore
**through_kwargs,
child=child, **through_kwargs # type: ignore
)
return len(children)

View File

@ -57,7 +57,7 @@ class RelationsManager:
return None # pragma nocover
@staticmethod
def add(parent: "Model", child: "Model", field: "ForeignKeyField",) -> None:
def add(parent: "Model", child: "Model", field: "ForeignKeyField") -> None:
"""
Adds relation on both sides -> meaning on both child and parent models.
One side of the relation is always weakref proxy to avoid circular refs.
@ -73,7 +73,7 @@ class RelationsManager:
:param field: field with relation definition
:type field: ForeignKeyField
"""
(parent, child, child_name, to_name,) = get_relations_sides_and_names(
(parent, child, child_name, to_name) = get_relations_sides_and_names(
field, parent, child
)

View File

@ -8,7 +8,7 @@ if TYPE_CHECKING: # pragma no cover
def get_relations_sides_and_names(
to_field: ForeignKeyField, parent: "Model", child: "Model",
to_field: ForeignKeyField, parent: "Model", child: "Model"
) -> Tuple["Model", "Model", str, str]:
"""
Determines the names of child and parent relations names, as well as

2421
poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

153
pyproject.toml Normal file
View File

@ -0,0 +1,153 @@
[tool.poetry]
name = "ormar"
version = "0.10.20"
description = "A simple async ORM with fastapi in mind and pydantic validation."
authors = ["Radosław Drążkiewicz <collerek@gmail.com>"]
license = "MIT"
readme = "README.md"
homepage = "https://github.com/collerek/ormar"
repository = "https://github.com/collerek/ormar"
documentation = "https://collerek.github.io/ormar/"
packages = [
{ include="ormar" }
]
keywords = [
"orm",
"sqlalchemy",
"fastapi",
"pydantic",
"databases",
"async",
"alembic",
]
classifiers = [
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP",
"Framework :: AsyncIO",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
]
[tool.poetry.dependencies]
python = "^3.6.2"
databases = ">=0.3.2,<0.5.3"
pydantic = ">=1.6.1,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<=1.8.2"
SQLAlchemy = ">=1.3.18,<1.4.26"
asyncpg = { version = "^0.24.0", optional = true }
psycopg2-binary = { version = "^2.9.1", optional = true }
aiomysql = { version = "^0.0.21", optional = true }
aiosqlite = { version = "^0.17.0", optional = true }
cryptography = { version = "^35.0.0", optional = true }
[tool.poetry.dependencies.orjson]
version = "^3.6.4"
optional = true
python = ">=3.7"
[tool.poetry.dependencies.typing-extensions]
version = "^3.7"
python = "<3.8"
[tool.poetry.dependencies.importlib-metadata]
version = ">=3.1"
python = "<3.8"
[tool.poetry.dev-dependencies]
# Async database driversy
aiomysql = "^0.0.21"
aiosqlite = "^0.17.0"
aiopg = "^1.3.2"
asyncpg = "^0.24.0"
# Sync database drivers for standard tooling around setup/teardown/migrations.
psycopg2-binary = "^2.9.1"
mysqlclient = "^2.0.3"
PyMySQL = ">=0.9,<=0.9.3"
# Testing
pytest = "^6.2.5"
pytest-cov = "^3.0.0"
codecov = "^2.1.12"
pytest-asyncio = "^0.15.1"
fastapi = "^0.70.0"
flake8 = "^3.9.2"
flake8-black = "^0.2.3"
flake8-bugbear = "^21.9.2"
flake8-import-order = "^0.18.1"
flake8-bandit = "^2.1.2"
flake8-builtins = "^1.5.3"
flake8-variables-names = "^0.0.4"
flake8-cognitive-complexity = "^0.1.0"
flake8-functions = "^0.0.6"
flake8-expression-complexity = "^0.0.9"
# types
mypy = "^0.910"
types-ujson = "^0.1.1"
types-PyMySQL = "^1.0.2"
types-ipaddress = "^1.0.0"
types-enum34 = "^1.1.0"
types-cryptography = "^3.3.5"
types-orjson = "^3.6.0"
types-aiofiles = "^0.1.9"
types-pkg-resources = "^0.1.3"
types-requests = "^2.25.9"
types-toml = "^0.10.0"
types-dataclasses = { version = "^0.1.7", markers = "python_version < '3.7'" }
# Documantation
mkdocs = "^1.2.2"
mkdocs-material = "^7.3.2"
mkdocs-material-extensions = "^1.0.3"
pydoc-markdown = { version = "^4.3.2", markers = "python_version > '3.7'" }
dataclasses = { version = ">=0.6.0,<0.8 || >0.8,<1.0.0" }
# Performance testing
yappi = "^1.3.3"
[tool.poetry.extras]
postgresql = ["asyncpg", "psycopg2-binary"]
postgres = ["asyncpg", "psycopg2-binary"]
mysql = ["aiomysql"]
sqlite = ["sqlite"]
orjson = ["orjson"]
crypto = ["cryptography"]
dev = [
"asyncpg",
"psycopg2-binary",
"aiomysql",
"sqlite",
"orjson",
"cryptography",
]
[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"
[tool.mypy]
# TODO: Enable mypy plugin after pydantic release supporting toml file
disallow_untyped_calls = true
disallow_untyped_defs = true
disallow_incomplete_defs = true
[[tool.mypy.overrides]]
module = "tests.*"
disallow_untyped_calls = false
disallow_untyped_defs = false
disallow_incomplete_defs = false
[[tool.mypy.overrides]]
module = ["sqlalchemy.*", "asyncpg"]
ignore_missing_imports = true

View File

@ -1,58 +0,0 @@
databases>=0.3.2,<0.5.3
pydantic >=1.6.1,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<=1.8.2
sqlalchemy>=1.3.18,<1.4.26
typing_extensions>=3.7,<3.10.0.3
orjson
cryptography
# Async database driversy
aiomysql
aiosqlite
aiopg
asyncpg
# Sync database drivers for standard tooling around setup/teardown/migrations.
pymysql
psycopg2-binary
mysqlclient
# Testing
pytest
pytest-cov
codecov
pytest-asyncio
fastapi
flake8
flake8-black
flake8-bugbear
flake8-import-order
flake8-bandit
flake8-annotations
flake8-builtins
flake8-variables-names
flake8-cognitive-complexity
flake8-functions
flake8-expression-complexity
# types
mypy
types-ujson
types-PyMySQL
types-ipaddress
types-enum34
types-cryptography
types-orjson
types-aiofiles
types-pkg_resources
types-requests
types-toml
# Documantation
mkdocs
mkdocs-material
mkdocs-material-extensions
pydoc-markdown
# Performance testing
yappi

View File

@ -1,93 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
from setuptools import setup
PACKAGE = "ormar"
URL = "https://github.com/collerek/ormar"
def get_version(package):
"""
Return package version as listed in `__version__` in `init.py`.
"""
with open(os.path.join(package, "__init__.py")) as f:
return re.search("__version__ = ['\"]([^'\"]+)['\"]", f.read()).group(1)
def get_long_description():
"""
Return the README.
"""
with open("README.md", encoding="utf8") as f:
return f.read()
def get_packages(package):
"""
Return root package and all sub-packages.
"""
return [
dirpath
for dirpath, dirnames, filenames in os.walk(package)
if os.path.exists(os.path.join(dirpath, "__init__.py"))
]
setup(
name=PACKAGE,
version=get_version(PACKAGE),
url=URL,
license="MIT",
description="A simple async ORM with fastapi in mind and pydantic validation.",
long_description=get_long_description(),
long_description_content_type="text/markdown",
keywords=[
"orm",
"sqlalchemy",
"fastapi",
"pydantic",
"databases",
"async",
"alembic",
],
author="Radosław Drążkiewicz",
author_email="collerek@gmail.com",
packages=get_packages(PACKAGE),
package_data={PACKAGE: ["py.typed"]},
include_package_data=True,
zip_safe=False,
python_requires=">=3.6",
data_files=[("", ["LICENSE.md"])],
install_requires=[
"databases>=0.3.2,<0.5.3",
"pydantic>=1.6.1,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<=1.8.2",
"sqlalchemy>=1.3.18,<1.4.26",
"typing_extensions>=3.7,<3.10.0.3",
],
extras_require={
"postgresql": ["asyncpg", "psycopg2-binary"],
"mysql": ["aiomysql", "pymysql"],
"sqlite": ["aiosqlite"],
"orjson": ["orjson"],
"crypto": ["cryptography"],
},
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP",
"Framework :: AsyncIO",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3 :: Only",
],
)

View File

@ -25,7 +25,7 @@ class BaseMeta(ormar.ModelMeta):
default_fernet = dict(
encrypt_secret="asd123", encrypt_backend=ormar.EncryptBackends.FERNET,
encrypt_secret="asd123", encrypt_backend=ormar.EncryptBackends.FERNET
)

View File

@ -135,7 +135,7 @@ class Project(orm.Model):
type: str = orm.String(max_length=10, default="cs")
target_branch_name: str = orm.String(max_length=100, default="master")
header: str = orm.String(max_length=250, default="")
jira_url: str = orm.String(max_length=500,)
jira_url: str = orm.String(max_length=500)
changelog_file: str = orm.String(max_length=250, default="")
version_file: str = orm.String(max_length=250, default="")

View File

@ -106,13 +106,7 @@ def compare_results_include(excludable):
def test_excluding_fields_from_list():
fields = [
"gearbox_type",
"gears",
"aircon_type",
"year",
"manufacturer__founded",
]
fields = ["gearbox_type", "gears", "aircon_type", "year", "manufacturer__founded"]
excludable = ExcludableItems()
excludable.build(items=fields, model_cls=Car, is_exclude=True)
compare_results(excludable)
@ -174,7 +168,7 @@ def test_nested_includes_from_dict():
fields = {
"id": ...,
"name": ...,
"manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name": ...}},},
"manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name": ...}}},
}
excludable = ExcludableItems()
excludable.build(items=fields, model_cls=Car, is_exclude=False)
@ -185,7 +179,7 @@ def test_nested_includes_from_dict_with_set():
fields = {
"id": ...,
"name": ...,
"manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name"}},},
"manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name"}}},
}
excludable = ExcludableItems()
excludable.build(items=fields, model_cls=Car, is_exclude=False)

View File

@ -167,11 +167,7 @@ def test_excluding_fields_in_endpoints():
assert created_user.pk is not None
assert created_user.password is None
user2 = {
"email": "test@domain.com",
"first_name": "John",
"last_name": "Doe",
}
user2 = {"email": "test@domain.com", "first_name": "John", "last_name": "Doe"}
response = client.post("/users/", json=user2)
created_user = User(**response.json())

View File

@ -1,7 +1,6 @@
import datetime
import decimal
import uuid
from base64 import b64encode
from enum import Enum
import databases
@ -60,7 +59,7 @@ class Organisation(ormar.Model):
random_decimal: decimal.Decimal = ormar.Decimal(
scale=2, precision=4, choices=[decimal.Decimal(12.4), decimal.Decimal(58.2)]
)
random_json: pydantic.Json = ormar.JSON(choices=["aa", '{"aa":"bb"}'])
random_json: pydantic.Json = ormar.JSON(choices=["aa", '{"aa": "bb"}'])
random_uuid: uuid.UUID = ormar.UUID(choices=[uuid1, uuid2])
enum_string: str = ormar.String(max_length=100, choices=list(EnumTest))
blob_col: bytes = ormar.LargeBinary(max_length=100000, choices=[blob, blob2])
@ -116,7 +115,7 @@ def test_all_endpoints():
"expire_datetime": "2022-05-01T12:30:00",
"random_val": 3.5,
"random_decimal": 12.4,
"random_json": '{"aa":"bb"}',
"random_json": '{"aa": "bb"}',
"random_uuid": str(uuid1),
"enum_string": EnumTest.val1.value,
"blob_col": blob.decode("utf-8"),

View File

@ -131,10 +131,6 @@ def test_all_endpoints():
assert items[0].name == "New name"
assert items[0].category.name is None
loop = asyncio.get_event_loop()
loop.run_until_complete(items[0].category.load())
assert items[0].category.name is not None
response = client.get(f"/items/{item.pk}")
new_item = Item(**response.json())
assert new_item == item

View File

@ -133,9 +133,7 @@ class Car2(ormar.Model):
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=50)
owner: Person = ormar.ForeignKey(Person, related_name="owned")
co_owners: List[Person] = ormar.ManyToMany(
Person, related_name="coowned",
)
co_owners: List[Person] = ormar.ManyToMany(Person, related_name="coowned")
created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
@ -204,7 +202,7 @@ def test_field_redefining_in_concrete_models():
assert changed_field.get_alias() == "creation_date"
assert any(x.name == "creation_date" for x in RedefinedField.Meta.table.columns)
assert isinstance(
RedefinedField.Meta.table.columns["creation_date"].type, sa.sql.sqltypes.String,
RedefinedField.Meta.table.columns["creation_date"].type, sa.sql.sqltypes.String
)

View File

@ -95,8 +95,7 @@ def test_field_redefining_in_second_raises_error():
)
assert any(x.name == "creation_date" for x in RedefinedField2.Meta.table.columns)
assert isinstance(
RedefinedField2.Meta.table.columns["creation_date"].type,
sa.sql.sqltypes.String,
RedefinedField2.Meta.table.columns["creation_date"].type, sa.sql.sqltypes.String
)

View File

@ -62,18 +62,18 @@ async def cleanup():
@pytest.mark.asyncio
async def test_creating_a_position(cleanup):
async with database:
instance = PositionOrm(name="my_pos", x=1.0, y=2.0, degrees=3.0,)
instance = PositionOrm(name="my_pos", x=1.0, y=2.0, degrees=3.0)
await instance.save()
assert instance.saved
assert instance.name == "my_pos"
instance2 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0,)
instance2 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0)
await instance2.save()
assert instance2.saved
assert instance2.name is not None
assert len(instance2.name) == 12
instance3 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0,)
instance3 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0)
await instance3.save()
assert instance3.saved
assert instance3.name is not None

View File

@ -27,7 +27,7 @@ class Mol(ormar.Model):
class Meta(BaseMeta):
tablename = "mols"
id: str = ormar.UUID(primary_key=True, index=True, uuid_format="hex")
id: uuid.UUID = ormar.UUID(primary_key=True, index=True, uuid_format="hex")
smiles: str = ormar.String(nullable=False, unique=True, max_length=256)
def __init__(self, **kwargs):

View File

@ -117,7 +117,7 @@ def test_operator_return_proper_filter_action(method, expected, expected_value):
}
@pytest.mark.parametrize("method, expected_direction", [("asc", ""), ("desc", "desc"),])
@pytest.mark.parametrize("method, expected_direction", [("asc", ""), ("desc", "desc")])
def test_operator_return_proper_order_action(method, expected_direction):
action = getattr(Product.name, method)()
assert action.source_model == Product

View File

@ -126,7 +126,7 @@ class Country(ormar.Model):
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(
max_length=9, choices=country_name_choices, default="Canada",
max_length=9, choices=country_name_choices, default="Canada"
)
taxed: bool = ormar.Boolean(choices=country_taxed_choices, default=True)
country_code: int = ormar.Integer(

View File

@ -19,9 +19,9 @@ class OverwriteTest(ormar.Model):
database = database
id: int = ormar.Integer(primary_key=True)
my_int: str = ormar.Integer(overwrite_pydantic_type=PositiveInt)
my_int: int = ormar.Integer(overwrite_pydantic_type=PositiveInt)
constraint_dict: Json = ormar.JSON(
overwrite_pydantic_type=Optional[Json[Dict[str, int]]]
overwrite_pydantic_type=Optional[Json[Dict[str, int]]] # type: ignore
)

View File

@ -24,7 +24,7 @@ class ModelTest(ormar.Model):
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
url: HttpUrl = "https://www.example.com"
url: HttpUrl = "https://www.example.com" # type: ignore
number: Optional[PaymentCardNumber]
@ -47,7 +47,7 @@ class ModelTest2(ormar.Model):
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
url: HttpUrl = "https://www.example2.com"
url: HttpUrl = "https://www.example2.com" # type: ignore
number: PaymentCardNumber = Field(default_factory=get_number)
@ -67,7 +67,7 @@ class ModelTest3(ormar.Model):
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200)
url: HttpUrl = "https://www.example3.com"
url: HttpUrl = "https://www.example3.com" # type: ignore
number: PaymentCardNumber
pydantic_test: PydanticTest

View File

@ -35,7 +35,7 @@ class SecondaryModel(ormar.Model):
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
primary_model: PrimaryModel = ormar.ForeignKey(
PrimaryModel, related_name="secondary_models",
PrimaryModel, related_name="secondary_models"
)

View File

@ -31,7 +31,7 @@ class DataSourceTable(ormar.Model):
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200, index=True)
source: Optional[DataSource] = ormar.ForeignKey(
DataSource, name="source_id", related_name="tables", ondelete="CASCADE",
DataSource, name="source_id", related_name="tables", ondelete="CASCADE"
)
@ -43,7 +43,7 @@ class DataSourceTableColumn(ormar.Model):
name: str = ormar.String(max_length=200, index=True)
data_type: str = ormar.String(max_length=200)
table: Optional[DataSourceTable] = ormar.ForeignKey(
DataSourceTable, name="table_id", related_name="columns", ondelete="CASCADE",
DataSourceTable, name="table_id", related_name="columns", ondelete="CASCADE"
)

View File

@ -130,7 +130,7 @@ async def test_or_filters():
)
)
& (Book.title.startswith("The"))
),
)
)
.all()
)

View File

@ -95,6 +95,6 @@ async def test_add_students():
assert user.attending is not None
assert len(user.attending) > 0
query = Session.objects.prefetch_related(["students", "teacher",])
query = Session.objects.prefetch_related(["students", "teacher"])
sessions = await query.all()
assert len(sessions) == 5

View File

@ -56,7 +56,7 @@ class SecondaryModel(ormar.Model):
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
primary_model: PrimaryModel = ormar.ForeignKey(
PrimaryModel, related_name="secondary_models",
PrimaryModel, related_name="secondary_models"
)

View File

@ -6,14 +6,7 @@ import pytest
import sqlalchemy
import ormar
from ormar import (
post_delete,
post_save,
post_update,
pre_delete,
pre_save,
pre_update,
)
from ormar import post_delete, post_save, post_update, pre_delete, pre_save, pre_update
from ormar.exceptions import SignalDefinitionError
from tests.settings import DATABASE_URL