Added poetry integration.

Description:
* Fixed github actions;
* Removed requirements.txt;
* Fixed CONTRIBUTING.md;
* Fixed black and flake8.

Signed-off-by: Pavel <win10@list.ru>
This commit is contained in:
Pavel
2021-10-08 15:57:22 +04:00
parent 80c6ff38a1
commit b2541bed1e
52 changed files with 2685 additions and 377 deletions

View File

@ -1,5 +1,5 @@
[flake8] [flake8]
ignore = ANN101, ANN102, W503, S101 ignore = ANN101, ANN102, W503, S101, CFQ004
max-complexity = 8 max-complexity = 8
max-line-length = 88 max-line-length = 88
import-order-style = pycharm import-order-style = pycharm

View File

@ -20,12 +20,13 @@ jobs:
python-version: '3.x' python-version: '3.x'
- name: Install dependencies - name: Install dependencies
run: | run: |
python -m pip install --upgrade pip python -m pip install --upgrade poetry
pip install setuptools wheel twine poetry install --no-dev
env:
POETRY_VIRTUALENVS_CREATE: false
- name: Build and publish - name: Build and publish
env: env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} POETRY_HTTP_BASIC_PYPI_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} POETRY_HTTP_BASIC_PYPI_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: | run: |
python setup.py sdist bdist_wheel poetry publish --build
twine upload dist/*

View File

@ -50,8 +50,10 @@ jobs:
python-version: ${{ matrix.python-version }} python-version: ${{ matrix.python-version }}
- name: Install dependencies - name: Install dependencies
run: | run: |
python -m pip install pip==21.0.1 python -m pip install poetry==1.1.11
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi poetry install
env:
POETRY_VIRTUALENVS_CREATE: false
- name: Run mysql - name: Run mysql
env: env:
DATABASE_URL: "mysql://username:password@127.0.0.1:3306/testsuite" DATABASE_URL: "mysql://username:password@127.0.0.1:3306/testsuite"

5
.vscode/settings.json vendored Normal file
View File

@ -0,0 +1,5 @@
{
"python.linting.flake8Enabled": true,
"python.linting.enabled": true,
"python.formatting.provider": "black"
}

View File

@ -22,38 +22,33 @@ It should be quite straight forward to get started and create a Pull Request.
To make contributing as easy and fast as possible, you'll want to run tests and linting locally. To make contributing as easy and fast as possible, you'll want to run tests and linting locally.
You'll need to have **python 3.6**, **3.7**, or **3.8**, **virtualenv**, and **git** installed. You'll need to have **python 3.6.2**, **3.7**, or **3.8**, **poetry**, and **git** installed.
```bash ```bash
# 1. clone your fork and cd into the repo directory # 1. clone your fork and cd into the repo directory
git clone git@github.com:<your username>/ormar.git git clone git@github.com:<your username>/ormar.git
cd ormar cd ormar
# 2. Set up a virtualenv for running tests # 2. Install ormar, dependencies and test dependencies
virtualenv -p `which python3.7` env poetry install -E dev
source env/bin/activate
# (or however you prefer to setup a python environment, 3.6 will work too)
# 3. Install ormar, dependencies and test dependencies # 3. Checkout a new branch and make your changes
pip install -r requirements.txt
# 4. Checkout a new branch and make your changes
git checkout -b my-new-feature-branch git checkout -b my-new-feature-branch
# make your changes... # make your changes...
# 5. Formatting and linting # 4. Formatting and linting
# ormar uses black for formatting, flake8 for linting and mypy for type hints check # ormar uses black for formatting, flake8 for linting and mypy for type hints check
# run all of the following as all those calls will be run on travis after every push # run all of the following as all those calls will be run on travis after every push
black ormar tests black ormar tests
flake8 ormar flake8 ormar
mypy --config-file mypy.ini ormar tests mypy ormar tests
# 6. Run tests # 5. Run tests
# on localhost all tests are run against sglite backend # on localhost all tests are run against sglite backend
# rest of the backends will be checked after push # rest of the backends will be checked after push
pytest -svv --cov=ormar --cov=tests --cov-fail-under=100 --cov-report=term-missing pytest -svv --cov=ormar --cov=tests --cov-fail-under=100 --cov-report=term-missing
# 7. Build documentation # 6. Build documentation
mkdocs build mkdocs build
# if you have changed the documentation make sure it builds successfully # if you have changed the documentation make sure it builds successfully
# you can also use `mkdocs serve` to serve the documentation at localhost:8000 # you can also use `mkdocs serve` to serve the documentation at localhost:8000

View File

@ -19,6 +19,10 @@ snakes, and ormar(e) in italian which means cabinet.
And what's a better name for python ORM than snakes cabinet :) And what's a better name for python ORM than snakes cabinet :)
""" """
try:
from importlib import metadata
except ImportError: # pragma: no cover
import importlib_metadata as metadata # type: ignore
from ormar.protocols import QuerySetProtocol, RelationProtocol # noqa: I100 from ormar.protocols import QuerySetProtocol, RelationProtocol # noqa: I100
from ormar.decorators import ( # noqa: I100 from ormar.decorators import ( # noqa: I100
post_delete, post_delete,
@ -64,7 +68,7 @@ from ormar.fields import (
UUID, UUID,
UniqueColumns, UniqueColumns,
) # noqa: I100 ) # noqa: I100
from ormar.models import ExcludableItems, Model, Extra from ormar.models import ExcludableItems, Extra, Model
from ormar.models.metaclass import ModelMeta from ormar.models.metaclass import ModelMeta
from ormar.queryset import OrderAction, QuerySet, and_, or_ from ormar.queryset import OrderAction, QuerySet, and_, or_
from ormar.relations import RelationType from ormar.relations import RelationType
@ -78,7 +82,7 @@ class UndefinedType: # pragma no cover
Undefined = UndefinedType() Undefined = UndefinedType()
__version__ = "0.10.20" __version__ = metadata.version("ormar")
__all__ = [ __all__ = [
"Integer", "Integer",
"BigInteger", "BigInteger",

View File

@ -41,7 +41,7 @@ def receiver(
return _decorator return _decorator
def post_save(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable: def post_save(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
""" """
Connect given function to all senders for post_save signal. Connect given function to all senders for post_save signal.
@ -54,7 +54,7 @@ def post_save(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable:
return receiver(signal="post_save", senders=senders) return receiver(signal="post_save", senders=senders)
def post_update(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable: def post_update(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
""" """
Connect given function to all senders for post_update signal. Connect given function to all senders for post_update signal.
@ -67,7 +67,7 @@ def post_update(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable
return receiver(signal="post_update", senders=senders) return receiver(signal="post_update", senders=senders)
def post_delete(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable: def post_delete(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
""" """
Connect given function to all senders for post_delete signal. Connect given function to all senders for post_delete signal.
@ -80,7 +80,7 @@ def post_delete(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable
return receiver(signal="post_delete", senders=senders) return receiver(signal="post_delete", senders=senders)
def pre_save(senders: Union[Type["Model"], List[Type["Model"]]],) -> Callable: def pre_save(senders: Union[Type["Model"], List[Type["Model"]]]) -> Callable:
""" """
Connect given function to all senders for pre_save signal. Connect given function to all senders for pre_save signal.

View File

@ -92,7 +92,7 @@ def create_dummy_model(
def populate_fk_params_based_on_to_model( def populate_fk_params_based_on_to_model(
to: Type["T"], nullable: bool, onupdate: str = None, ondelete: str = None, to: Type["T"], nullable: bool, onupdate: str = None, ondelete: str = None
) -> Tuple[Any, List, Any]: ) -> Tuple[Any, List, Any]:
""" """
Based on target to model to which relation leads to populates the type of the Based on target to model to which relation leads to populates the type of the
@ -347,9 +347,7 @@ class ForeignKeyField(BaseField):
""" """
if self.to.__class__ == ForwardRef: if self.to.__class__ == ForwardRef:
self.to = evaluate_forwardref( self.to = evaluate_forwardref(
self.to, # type: ignore self.to, globalns, localns or None # type: ignore
globalns,
localns or None,
) )
( (
self.__type__, self.__type__,
@ -363,7 +361,7 @@ class ForeignKeyField(BaseField):
) )
def _extract_model_from_sequence( def _extract_model_from_sequence(
self, value: List, child: "Model", to_register: bool, self, value: List, child: "Model", to_register: bool
) -> List["Model"]: ) -> List["Model"]:
""" """
Takes a list of Models and registers them on parent. Takes a list of Models and registers them on parent.
@ -382,13 +380,13 @@ class ForeignKeyField(BaseField):
""" """
return [ return [
self.expand_relationship( # type: ignore self.expand_relationship( # type: ignore
value=val, child=child, to_register=to_register, value=val, child=child, to_register=to_register
) )
for val in value for val in value
] ]
def _register_existing_model( def _register_existing_model(
self, value: "Model", child: "Model", to_register: bool, self, value: "Model", child: "Model", to_register: bool
) -> "Model": ) -> "Model":
""" """
Takes already created instance and registers it for parent. Takes already created instance and registers it for parent.
@ -479,9 +477,7 @@ class ForeignKeyField(BaseField):
:param child: child model :param child: child model
:type child: Model class :type child: Model class
""" """
model._orm.add( model._orm.add(parent=model, child=child, field=self)
parent=model, child=child, field=self,
)
def has_unresolved_forward_refs(self) -> bool: def has_unresolved_forward_refs(self) -> bool:
""" """

View File

@ -223,20 +223,16 @@ class ManyToManyField(ForeignKeyField, ormar.QuerySetProtocol, ormar.RelationPro
""" """
if self.to.__class__ == ForwardRef: if self.to.__class__ == ForwardRef:
self.to = evaluate_forwardref( self.to = evaluate_forwardref(
self.to, # type: ignore self.to, globalns, localns or None # type: ignore
globalns,
localns or None,
) )
(self.__type__, self.column_type,) = populate_m2m_params_based_on_to_model( (self.__type__, self.column_type) = populate_m2m_params_based_on_to_model(
to=self.to, nullable=self.nullable, to=self.to, nullable=self.nullable
) )
if self.through.__class__ == ForwardRef: if self.through.__class__ == ForwardRef:
self.through = evaluate_forwardref( self.through = evaluate_forwardref(
self.through, # type: ignore self.through, globalns, localns or None # type: ignore
globalns,
localns or None,
) )
forbid_through_relations(self.through) forbid_through_relations(self.through)

View File

@ -254,9 +254,7 @@ class Text(ModelFieldFactory, str):
_type = str _type = str
_sample = "text" _sample = "text"
def __new__( # type: ignore def __new__(cls, **kwargs: Any) -> BaseField: # type: ignore
cls, **kwargs: Any
) -> BaseField:
kwargs = { kwargs = {
**kwargs, **kwargs,
**{ **{

View File

@ -15,7 +15,7 @@ if TYPE_CHECKING: # pragma no cover
def Through( # noqa CFQ002 def Through( # noqa CFQ002
to: "ToType", *, name: str = None, related_name: str = None, **kwargs: Any, to: "ToType", *, name: str = None, related_name: str = None, **kwargs: Any
) -> Any: ) -> Any:
""" """
Despite a name it's a function that returns constructed ThroughField. Despite a name it's a function that returns constructed ThroughField.

View File

@ -29,7 +29,7 @@ def is_field_an_forward_ref(field: "BaseField") -> bool:
) )
def populate_default_options_values( def populate_default_options_values( # noqa: CCR001
new_model: Type["Model"], model_fields: Dict new_model: Type["Model"], model_fields: Dict
) -> None: ) -> None:
""" """

View File

@ -280,7 +280,7 @@ def copy_and_replace_m2m_through_model( # noqa: CFQ002
field.create_default_through_model() field.create_default_through_model()
through_class = field.through through_class = field.through
new_meta: ormar.ModelMeta = type( # type: ignore new_meta: ormar.ModelMeta = type( # type: ignore
"Meta", (), dict(through_class.Meta.__dict__), "Meta", (), dict(through_class.Meta.__dict__)
) )
copy_name = through_class.__name__ + attrs.get("__name__", "") copy_name = through_class.__name__ + attrs.get("__name__", "")
copy_through = type(copy_name, (ormar.Model,), {"Meta": new_meta}) copy_through = type(copy_name, (ormar.Model,), {"Meta": new_meta})
@ -566,9 +566,7 @@ class ModelMetaclass(pydantic.main.ModelMetaclass):
attrs, model_fields = extract_from_parents_definition( attrs, model_fields = extract_from_parents_definition(
base_class=base, curr_class=mcs, attrs=attrs, model_fields=model_fields base_class=base, curr_class=mcs, attrs=attrs, model_fields=model_fields
) )
new_model = super().__new__( # type: ignore new_model = super().__new__(mcs, name, bases, attrs) # type: ignore
mcs, name, bases, attrs
)
add_cached_properties(new_model) add_cached_properties(new_model)
@ -647,6 +645,6 @@ class ModelMetaclass(pydantic.main.ModelMetaclass):
access_chain=item, access_chain=item,
) )
return FieldAccessor( return FieldAccessor(
source_model=cast(Type["Model"], self), field=field, access_chain=item, source_model=cast(Type["Model"], self), field=field, access_chain=item
) )
return object.__getattribute__(self, item) return object.__getattribute__(self, item)

View File

@ -141,7 +141,7 @@ class ExcludableMixin(RelationMixin):
return columns return columns
@classmethod @classmethod
def _update_excluded_with_related(cls, exclude: Union[Set, Dict, None],) -> Set: def _update_excluded_with_related(cls, exclude: Union[Set, Dict, None]) -> Set:
""" """
Used during generation of the dict(). Used during generation of the dict().
To avoid cyclical references and max recurrence limit nested models have to To avoid cyclical references and max recurrence limit nested models have to

View File

@ -51,7 +51,7 @@ class PrefetchQueryMixin(RelationMixin):
@staticmethod @staticmethod
def get_column_name_for_id_extraction( def get_column_name_for_id_extraction(
parent_model: Type["Model"], reverse: bool, related: str, use_raw: bool, parent_model: Type["Model"], reverse: bool, related: str, use_raw: bool
) -> str: ) -> str:
""" """
Returns name of the column that should be used to extract ids from model. Returns name of the column that should be used to extract ids from model.

View File

@ -28,7 +28,7 @@ class PydanticMixin(RelationMixin):
@classmethod @classmethod
def get_pydantic( def get_pydantic(
cls, *, include: Union[Set, Dict] = None, exclude: Union[Set, Dict] = None, cls, *, include: Union[Set, Dict] = None, exclude: Union[Set, Dict] = None
) -> Type[pydantic.BaseModel]: ) -> Type[pydantic.BaseModel]:
""" """
Returns a pydantic model out of ormar model. Returns a pydantic model out of ormar model.

View File

@ -1,12 +1,4 @@
from typing import ( from typing import Callable, Dict, List, Optional, Set, TYPE_CHECKING, cast
Callable,
Dict,
List,
Optional,
Set,
TYPE_CHECKING,
cast,
)
from ormar import BaseField, ForeignKeyField from ormar import BaseField, ForeignKeyField
from ormar.models.traversible import NodeList from ormar.models.traversible import NodeList

View File

@ -222,7 +222,7 @@ class SavePrepareMixin(RelationMixin, AliasMixin):
@staticmethod @staticmethod
async def _upsert_through_model( async def _upsert_through_model(
instance: "Model", previous_model: "Model", relation_field: "ForeignKeyField", instance: "Model", previous_model: "Model", relation_field: "ForeignKeyField"
) -> None: ) -> None:
""" """
Upsert through model for m2m relation. Upsert through model for m2m relation.

View File

@ -1,13 +1,4 @@
from typing import ( from typing import Any, Dict, List, Optional, Set, TYPE_CHECKING, TypeVar, Union
Any,
Dict,
List,
Optional,
Set,
TYPE_CHECKING,
TypeVar,
Union,
)
import ormar.queryset # noqa I100 import ormar.queryset # noqa I100
from ormar.exceptions import ModelPersistenceError, NoMatch from ormar.exceptions import ModelPersistenceError, NoMatch

View File

@ -1,14 +1,4 @@
from typing import ( from typing import Any, Dict, List, Optional, TYPE_CHECKING, Tuple, Type, Union, cast
Any,
Dict,
List,
Optional,
TYPE_CHECKING,
Tuple,
Type,
Union,
cast,
)
try: try:
from sqlalchemy.engine.result import ResultProxy from sqlalchemy.engine.result import ResultProxy
@ -293,7 +283,7 @@ class ModelRow(NewBaseModel):
""" """
through_name = cls.Meta.model_fields[related].through.get_name() through_name = cls.Meta.model_fields[related].through.get_name()
through_child = cls._create_through_instance( through_child = cls._create_through_instance(
row=row, related=related, through_name=through_name, excludable=excludable, row=row, related=related, through_name=through_name, excludable=excludable
) )
if child.__class__ != proxy_source_model: if child.__class__ != proxy_source_model:
@ -378,7 +368,7 @@ class ModelRow(NewBaseModel):
:rtype: Dict :rtype: Dict
""" """
selected_columns = cls.own_table_columns( selected_columns = cls.own_table_columns(
model=cls, excludable=excludable, alias=table_prefix, use_alias=False, model=cls, excludable=excludable, alias=table_prefix, use_alias=False
) )
column_prefix = table_prefix + "_" if table_prefix else "" column_prefix = table_prefix + "_" if table_prefix else ""

View File

@ -18,17 +18,17 @@ from typing import (
cast, cast,
) )
import databases
import pydantic
import sqlalchemy
from ormar.models.utils import Extra from ormar.models.utils import Extra
from pydantic import BaseModel
try: try:
import orjson as json import orjson as json
except ImportError: # pragma: no cover except ImportError: # pragma: no cover
import json # type: ignore import json # type: ignore
import databases
import pydantic
import sqlalchemy
from pydantic import BaseModel
import ormar # noqa I100 import ormar # noqa I100
from ormar.exceptions import ModelError, ModelPersistenceError from ormar.exceptions import ModelError, ModelPersistenceError
@ -154,7 +154,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
# register the columns models after initialization # register the columns models after initialization
for related in self.extract_related_names().union(self.extract_through_names()): for related in self.extract_related_names().union(self.extract_through_names()):
model_fields[related].expand_relationship( model_fields[related].expand_relationship(
new_kwargs.get(related), self, to_register=True, new_kwargs.get(related), self, to_register=True
) )
if hasattr(self, "_init_private_attributes"): if hasattr(self, "_init_private_attributes"):
@ -218,7 +218,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
f"need to call update_forward_refs()." f"need to call update_forward_refs()."
) )
def _process_kwargs(self, kwargs: Dict) -> Tuple[Dict, Dict]: def _process_kwargs(self, kwargs: Dict) -> Tuple[Dict, Dict]: # noqa: CCR001
""" """
Initializes nested models. Initializes nested models.
@ -261,7 +261,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
k, k,
self._convert_json( self._convert_json(
k, k,
model_fields[k].expand_relationship(v, self, to_register=False,) model_fields[k].expand_relationship(v, self, to_register=False)
if k in model_fields if k in model_fields
else (v if k in pydantic_fields else model_fields[k]), else (v if k in pydantic_fields else model_fields[k]),
), ),
@ -315,7 +315,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
self, self,
"_orm", "_orm",
RelationsManager( RelationsManager(
related_fields=self.extract_related_fields(), owner=cast("Model", self), related_fields=self.extract_related_fields(), owner=cast("Model", self)
), ),
) )
@ -488,7 +488,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
@staticmethod @staticmethod
def _get_not_excluded_fields( def _get_not_excluded_fields(
fields: Union[List, Set], include: Optional[Dict], exclude: Optional[Dict], fields: Union[List, Set], include: Optional[Dict], exclude: Optional[Dict]
) -> List: ) -> List:
""" """
Returns related field names applying on them include and exclude set. Returns related field names applying on them include and exclude set.

View File

@ -57,7 +57,7 @@ class FilterAction(QueryAction):
Extracted in order to easily change table prefixes on complex relations. Extracted in order to easily change table prefixes on complex relations.
""" """
def __init__(self, filter_str: str, value: Any, model_cls: Type["Model"],) -> None: def __init__(self, filter_str: str, value: Any, model_cls: Type["Model"]) -> None:
super().__init__(query_str=filter_str, model_cls=model_cls) super().__init__(query_str=filter_str, model_cls=model_cls)
self.filter_value = value self.filter_value = value
self._escape_characters_in_clause() self._escape_characters_in_clause()
@ -148,7 +148,7 @@ class FilterAction(QueryAction):
filter_value = self.filter_value filter_value = self.filter_value
clause = getattr(self.column, op_attr)(filter_value) clause = getattr(self.column, op_attr)(filter_value)
clause = self._compile_clause( clause = self._compile_clause(
clause, modifiers={"escape": "\\" if self.has_escaped_character else None}, clause, modifiers={"escape": "\\" if self.has_escaped_character else None}
) )
return clause return clause
@ -170,7 +170,7 @@ class FilterAction(QueryAction):
] ]
def _compile_clause( def _compile_clause(
self, clause: sqlalchemy.sql.expression.BinaryExpression, modifiers: Dict, self, clause: sqlalchemy.sql.expression.BinaryExpression, modifiers: Dict
) -> sqlalchemy.sql.expression.TextClause: ) -> sqlalchemy.sql.expression.TextClause:
""" """
Compiles the clause to str using appropriate database dialect, replace columns Compiles the clause to str using appropriate database dialect, replace columns

View File

@ -185,7 +185,7 @@ class QueryClause:
""" """
def __init__( def __init__(
self, model_cls: Type["Model"], filter_clauses: List, select_related: List, self, model_cls: Type["Model"], filter_clauses: List, select_related: List
) -> None: ) -> None:
self._select_related = select_related[:] self._select_related = select_related[:]

View File

@ -1,14 +1,5 @@
from collections import OrderedDict from collections import OrderedDict
from typing import ( from typing import Any, Dict, List, Optional, TYPE_CHECKING, Tuple, Type, cast
Any,
Dict,
List,
Optional,
TYPE_CHECKING,
Tuple,
Type,
cast,
)
import sqlalchemy import sqlalchemy
from sqlalchemy import text from sqlalchemy import text
@ -102,9 +93,7 @@ class SqlJoin:
""" """
return self.next_model.Meta.table return self.next_model.Meta.table
def _on_clause( def _on_clause(self, previous_alias: str, from_clause: str, to_clause: str) -> text:
self, previous_alias: str, from_clause: str, to_clause: str,
) -> text:
""" """
Receives aliases and names of both ends of the join and combines them Receives aliases and names of both ends of the join and combines them
into one text clause used in joins. into one text clause used in joins.
@ -140,12 +129,7 @@ class SqlJoin:
self._process_following_joins() self._process_following_joins()
return ( return (self.used_aliases, self.select_from, self.columns, self.sorted_orders)
self.used_aliases,
self.select_from,
self.columns,
self.sorted_orders,
)
def _forward_join(self) -> None: def _forward_join(self) -> None:
""" """
@ -269,7 +253,7 @@ class SqlJoin:
new_part = target_field.default_target_field_name() # type: ignore new_part = target_field.default_target_field_name() # type: ignore
return new_part return new_part
def _process_join(self,) -> None: # noqa: CFQ002 def _process_join(self) -> None: # noqa: CFQ002
""" """
Resolves to and from column names and table names. Resolves to and from column names and table names.
@ -316,7 +300,7 @@ class SqlJoin:
def _set_default_primary_key_order_by(self) -> None: def _set_default_primary_key_order_by(self) -> None:
for order_by in self.next_model.Meta.orders_by: for order_by in self.next_model.Meta.orders_by:
clause = ormar.OrderAction( clause = ormar.OrderAction(
order_str=order_by, model_cls=self.next_model, alias=self.next_alias, order_str=order_by, model_cls=self.next_model, alias=self.next_alias
) )
self.sorted_orders[clause] = clause.get_text_clause() self.sorted_orders[clause] = clause.get_text_clause()
@ -355,8 +339,7 @@ class SqlJoin:
model = self.target_field.to model = self.target_field.to
else: else:
alias = self.alias_manager.resolve_relation_alias( alias = self.alias_manager.resolve_relation_alias(
from_model=self.target_field.owner, from_model=self.target_field.owner, relation_name=self.target_field.name
relation_name=self.target_field.name,
) )
model = self.target_field.to model = self.target_field.to

View File

@ -1,13 +1,4 @@
from typing import ( from typing import Dict, List, Sequence, Set, TYPE_CHECKING, Tuple, Type, cast
Dict,
List,
Sequence,
Set,
TYPE_CHECKING,
Tuple,
Type,
cast,
)
import ormar import ormar
from ormar.queryset.clause import QueryClause from ormar.queryset.clause import QueryClause
@ -192,7 +183,7 @@ class PrefetchQuery:
return list_of_ids return list_of_ids
def _extract_required_ids( def _extract_required_ids(
self, parent_model: Type["Model"], reverse: bool, related: str, self, parent_model: Type["Model"], reverse: bool, related: str
) -> Set: ) -> Set:
""" """
Delegates extraction of the fields to either get ids from raw sql response Delegates extraction of the fields to either get ids from raw sql response
@ -210,10 +201,7 @@ class PrefetchQuery:
use_raw = parent_model.get_name() not in self.models use_raw = parent_model.get_name() not in self.models
column_name = parent_model.get_column_name_for_id_extraction( column_name = parent_model.get_column_name_for_id_extraction(
parent_model=parent_model, parent_model=parent_model, reverse=reverse, related=related, use_raw=use_raw
reverse=reverse,
related=related,
use_raw=use_raw,
) )
if use_raw: if use_raw:
@ -263,7 +251,7 @@ class PrefetchQuery:
related=related, related=related,
) )
qryclause = QueryClause( qryclause = QueryClause(
model_cls=clause_target, select_related=[], filter_clauses=[], model_cls=clause_target, select_related=[], filter_clauses=[]
) )
kwargs = {f"{filter_column}__in": ids} kwargs = {f"{filter_column}__in": ids}
filter_clauses, _ = qryclause.prepare_filter(_own_only=False, **kwargs) filter_clauses, _ = qryclause.prepare_filter(_own_only=False, **kwargs)
@ -271,7 +259,7 @@ class PrefetchQuery:
return [] return []
def _populate_nested_related( def _populate_nested_related(
self, model: "Model", prefetch_dict: Dict, orders_by: Dict, self, model: "Model", prefetch_dict: Dict, orders_by: Dict
) -> "Model": ) -> "Model":
""" """
Populates all related models children of parent model that are Populates all related models children of parent model that are
@ -540,7 +528,7 @@ class PrefetchQuery:
) )
def _update_already_loaded_rows( # noqa: CFQ002 def _update_already_loaded_rows( # noqa: CFQ002
self, target_field: "BaseField", prefetch_dict: Dict, orders_by: Dict, self, target_field: "BaseField", prefetch_dict: Dict, orders_by: Dict
) -> None: ) -> None:
""" """
Updates models that are already loaded, usually children of children. Updates models that are already loaded, usually children of children.
@ -598,7 +586,7 @@ class PrefetchQuery:
for row in rows: for row in rows:
field_name = parent_model.get_related_field_name(target_field=target_field) field_name = parent_model.get_related_field_name(target_field=target_field)
item = target_model.extract_prefixed_table_columns( item = target_model.extract_prefixed_table_columns(
item={}, row=row, table_prefix=table_prefix, excludable=excludable, item={}, row=row, table_prefix=table_prefix, excludable=excludable
) )
item["__excluded__"] = target_model.get_names_to_exclude( item["__excluded__"] = target_model.get_names_to_exclude(
excludable=excludable, alias=exclude_prefix excludable=excludable, alias=exclude_prefix

View File

@ -111,7 +111,7 @@ class Query:
:rtype: sqlalchemy.sql.selectable.Select :rtype: sqlalchemy.sql.selectable.Select
""" """
self_related_fields = self.model_cls.own_table_columns( self_related_fields = self.model_cls.own_table_columns(
model=self.model_cls, excludable=self.excludable, use_alias=True, model=self.model_cls, excludable=self.excludable, use_alias=True
) )
self.columns = self.model_cls.Meta.alias_manager.prefixed_columns( self.columns = self.model_cls.Meta.alias_manager.prefixed_columns(
"", self.table, self_related_fields "", self.table, self_related_fields

View File

@ -247,7 +247,7 @@ class QuerySet(Generic[T]):
return self.model_meta.table return self.model_meta.table
def build_select_expression( def build_select_expression(
self, limit: int = None, offset: int = None, order_bys: List = None, self, limit: int = None, offset: int = None, order_bys: List = None
) -> sqlalchemy.sql.select: ) -> sqlalchemy.sql.select:
""" """
Constructs the actual database query used in the QuerySet. Constructs the actual database query used in the QuerySet.
@ -378,7 +378,7 @@ class QuerySet(Generic[T]):
] ]
related = sorted(list(set(list(self._select_related) + related))) related = sorted(list(set(list(self._select_related) + related)))
return self.rebuild_self(select_related=related,) return self.rebuild_self(select_related=related)
def select_all(self, follow: bool = False) -> "QuerySet[T]": def select_all(self, follow: bool = False) -> "QuerySet[T]":
""" """
@ -404,7 +404,7 @@ class QuerySet(Generic[T]):
relations = list(self.model.extract_related_names()) relations = list(self.model.extract_related_names())
if follow: if follow:
relations = self.model._iterate_related_models() relations = self.model._iterate_related_models()
return self.rebuild_self(select_related=relations,) return self.rebuild_self(select_related=relations)
def prefetch_related( def prefetch_related(
self, related: Union[List, str, FieldAccessor] self, related: Union[List, str, FieldAccessor]
@ -434,7 +434,7 @@ class QuerySet(Generic[T]):
] ]
related = list(set(list(self._prefetch_related) + related)) related = list(set(list(self._prefetch_related) + related))
return self.rebuild_self(prefetch_related=related,) return self.rebuild_self(prefetch_related=related)
def fields( def fields(
self, columns: Union[List, str, Set, Dict], _is_exclude: bool = False self, columns: Union[List, str, Set, Dict], _is_exclude: bool = False
@ -490,7 +490,7 @@ class QuerySet(Generic[T]):
is_exclude=_is_exclude, is_exclude=_is_exclude,
) )
return self.rebuild_self(excludable=excludable,) return self.rebuild_self(excludable=excludable)
def exclude_fields(self, columns: Union[List, str, Set, Dict]) -> "QuerySet[T]": def exclude_fields(self, columns: Union[List, str, Set, Dict]) -> "QuerySet[T]":
""" """
@ -564,7 +564,7 @@ class QuerySet(Generic[T]):
] ]
order_bys = self.order_bys + [x for x in orders_by if x not in self.order_bys] order_bys = self.order_bys + [x for x in orders_by if x not in self.order_bys]
return self.rebuild_self(order_bys=order_bys,) return self.rebuild_self(order_bys=order_bys)
async def values( async def values(
self, self,
@ -821,7 +821,7 @@ class QuerySet(Generic[T]):
limit_count = page_size limit_count = page_size
query_offset = (page - 1) * page_size query_offset = (page - 1) * page_size
return self.rebuild_self(limit_count=limit_count, offset=query_offset,) return self.rebuild_self(limit_count=limit_count, offset=query_offset)
def limit(self, limit_count: int, limit_raw_sql: bool = None) -> "QuerySet[T]": def limit(self, limit_count: int, limit_raw_sql: bool = None) -> "QuerySet[T]":
""" """
@ -838,7 +838,7 @@ class QuerySet(Generic[T]):
:rtype: QuerySet :rtype: QuerySet
""" """
limit_raw_sql = self.limit_sql_raw if limit_raw_sql is None else limit_raw_sql limit_raw_sql = self.limit_sql_raw if limit_raw_sql is None else limit_raw_sql
return self.rebuild_self(limit_count=limit_count, limit_raw_sql=limit_raw_sql,) return self.rebuild_self(limit_count=limit_count, limit_raw_sql=limit_raw_sql)
def offset(self, offset: int, limit_raw_sql: bool = None) -> "QuerySet[T]": def offset(self, offset: int, limit_raw_sql: bool = None) -> "QuerySet[T]":
""" """
@ -855,7 +855,7 @@ class QuerySet(Generic[T]):
:rtype: QuerySet :rtype: QuerySet
""" """
limit_raw_sql = self.limit_sql_raw if limit_raw_sql is None else limit_raw_sql limit_raw_sql = self.limit_sql_raw if limit_raw_sql is None else limit_raw_sql
return self.rebuild_self(offset=offset, limit_raw_sql=limit_raw_sql,) return self.rebuild_self(offset=offset, limit_raw_sql=limit_raw_sql)
async def first(self, *args: Any, **kwargs: Any) -> "T": async def first(self, *args: Any, **kwargs: Any) -> "T":
""" """

View File

@ -221,13 +221,13 @@ def extract_nested_models( # noqa: CCR001
if select_dict[related] is not Ellipsis: if select_dict[related] is not Ellipsis:
for sub_child in child: for sub_child in child:
extract_nested_models( extract_nested_models(
sub_child, target_model, select_dict[related], extracted, sub_child, target_model, select_dict[related], extracted
) )
else: else:
extracted.setdefault(target_model.get_name(), []).append(child) extracted.setdefault(target_model.get_name(), []).append(child)
if select_dict[related] is not Ellipsis: if select_dict[related] is not Ellipsis:
extract_nested_models( extract_nested_models(
child, target_model, select_dict[related], extracted, child, target_model, select_dict[related], extracted
) )

View File

@ -99,7 +99,7 @@ class AliasManager:
return table.alias(f"{alias}_{table.name}") return table.alias(f"{alias}_{table.name}")
def add_relation_type( def add_relation_type(
self, source_model: Type["Model"], relation_name: str, reverse_name: str = None, self, source_model: Type["Model"], relation_name: str, reverse_name: str = None
) -> None: ) -> None:
""" """
Registers the relations defined in ormar models. Registers the relations defined in ormar models.

View File

@ -294,7 +294,7 @@ class QuerysetProxy(Generic[T]):
:type fields: Union[List, str, Set, Dict] :type fields: Union[List, str, Set, Dict]
""" """
return await self.queryset.values( return await self.queryset.values(
fields=fields, exclude_through=exclude_through, fields=fields, exclude_through=exclude_through
) )
async def values_list( async def values_list(
@ -479,8 +479,7 @@ class QuerysetProxy(Generic[T]):
await child.update(**kwargs) # type: ignore await child.update(**kwargs) # type: ignore
if self.type_ == ormar.RelationType.MULTIPLE and through_kwargs: if self.type_ == ormar.RelationType.MULTIPLE and through_kwargs:
await self.update_through_instance( await self.update_through_instance(
child=child, # type: ignore child=child, **through_kwargs # type: ignore
**through_kwargs,
) )
return len(children) return len(children)

View File

@ -57,7 +57,7 @@ class RelationsManager:
return None # pragma nocover return None # pragma nocover
@staticmethod @staticmethod
def add(parent: "Model", child: "Model", field: "ForeignKeyField",) -> None: def add(parent: "Model", child: "Model", field: "ForeignKeyField") -> None:
""" """
Adds relation on both sides -> meaning on both child and parent models. Adds relation on both sides -> meaning on both child and parent models.
One side of the relation is always weakref proxy to avoid circular refs. One side of the relation is always weakref proxy to avoid circular refs.
@ -73,7 +73,7 @@ class RelationsManager:
:param field: field with relation definition :param field: field with relation definition
:type field: ForeignKeyField :type field: ForeignKeyField
""" """
(parent, child, child_name, to_name,) = get_relations_sides_and_names( (parent, child, child_name, to_name) = get_relations_sides_and_names(
field, parent, child field, parent, child
) )

View File

@ -8,7 +8,7 @@ if TYPE_CHECKING: # pragma no cover
def get_relations_sides_and_names( def get_relations_sides_and_names(
to_field: ForeignKeyField, parent: "Model", child: "Model", to_field: ForeignKeyField, parent: "Model", child: "Model"
) -> Tuple["Model", "Model", str, str]: ) -> Tuple["Model", "Model", str, str]:
""" """
Determines the names of child and parent relations names, as well as Determines the names of child and parent relations names, as well as

2409
poetry.lock generated Normal file

File diff suppressed because it is too large Load Diff

132
pyproject.toml Normal file
View File

@ -0,0 +1,132 @@
# Packaging and dependency configuration for ormar, managed by Poetry.

[tool.poetry]
name = "ormar"
version = "0.10.20"
description = "A simple async ORM with fastapi in mind and pydantic validation."
authors = ["Radosław Drążkiewicz <collerek@gmail.com>"]
license = "MIT"
readme = "README.md"
repository = "https://github.com/collerek/ormar"
documentation = "https://collerek.github.io/ormar/"
keywords = [
    "orm",
    "sqlalchemy",
    "fastapi",
    "pydantic",
    "databases",
    "async",
    "alembic",
]
classifiers = [
    "Development Status :: 4 - Beta",
    "Environment :: Web Environment",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Topic :: Internet :: WWW/HTTP",
    "Framework :: AsyncIO",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.6",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3 :: Only",
]

[tool.poetry.dependencies]
python = "^3.6.2"
databases = ">=0.3.2,<0.5.3"
pydantic = ">=1.6.1,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<=1.8.2"
SQLAlchemy = ">=1.3.18,<1.4.26"
# Optional database drivers / features, exposed via [tool.poetry.extras].
asyncpg = {version = "^0.24.0", optional = true}
psycopg2-binary = {version = "^2.9.1", optional = true}
aiomysql = {version = "^0.0.21", optional = true}
aiosqlite = {version = "^0.17.0", optional = true}
cryptography = {version = "^35.0.0", optional = true}

[tool.poetry.dependencies.orjson]
version = "^3.6.4"
optional = true
python = ">=3.7"

[tool.poetry.dependencies.typing-extensions]
version = "^3.7"
python = "<3.8"

[tool.poetry.dependencies.importlib-metadata]
version = ">=1.0"
python = "<3.8"

[tool.poetry.dev-dependencies]
# Async database drivers
aiomysql = "^0.0.21"
aiosqlite = "^0.17.0"
aiopg = "^1.3.2"
asyncpg = "^0.24.0"
# Sync database drivers for standard tooling around setup/teardown/migrations.
psycopg2-binary = "^2.9.1"
mysqlclient = "^2.0.3"
PyMySQL = ">=0.9,<=0.9.3"
# Testing
pytest = "^6.2.5"
pytest-cov = "^3.0.0"
codecov = "^2.1.12"
pytest-asyncio = "^0.15.1"
fastapi = "^0.70.0"
flake8 = "^3.9.2"
flake8-black = "^0.2.3"
flake8-bugbear = "^21.9.2"
flake8-import-order = "^0.18.1"
flake8-bandit = "^2.1.2"
flake8-builtins = "^1.5.3"
flake8-variables-names = "^0.0.4"
flake8-cognitive-complexity = "^0.1.0"
flake8-functions = "^0.0.6"
flake8-expression-complexity = "^0.0.9"
# types
mypy = "^0.910"
types-ujson = "^0.1.1"
types-PyMySQL = "^1.0.2"
types-ipaddress = "^1.0.0"
types-enum34 = "^1.1.0"
types-cryptography = "^3.3.5"
types-orjson = "^3.6.0"
types-aiofiles = "^0.1.9"
types-pkg-resources = "^0.1.3"
types-requests = "^2.25.9"
types-toml = "^0.10.0"
# Documentation
mkdocs = "^1.2.2"
mkdocs-material = "^7.3.2"
mkdocs-material-extensions = "^1.0.3"
pydoc-markdown = {version = "^4.3.2", markers = "python_version > '3.7'"}
dataclasses = {version = ">=0.6.0,<0.8 || >0.8,<1.0.0" }
# Performance testing
yappi = "^1.3.3"

[tool.poetry.extras]
postgresql = ["asyncpg", "psycopg2-binary"]
postgres = ["asyncpg", "psycopg2-binary"]
mysql = ["aiomysql"]
# Extras must reference declared optional dependencies; the sqlite driver
# package is `aiosqlite` — there is no dependency named `sqlite`.
sqlite = ["aiosqlite"]
orjson = ["orjson"]
crypto = ["cryptography"]
dev = [
    "asyncpg",
    "psycopg2-binary",
    "aiomysql",
    "aiosqlite",
    "orjson",
    "cryptography",
]

[build-system]
requires = ["poetry-core>=1.0.0"]
build-backend = "poetry.core.masonry.api"

View File

@ -1,58 +0,0 @@
databases>=0.3.2,<0.5.3
pydantic >=1.6.1,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<=1.8.2
sqlalchemy>=1.3.18,<1.4.26
typing_extensions>=3.7,<3.10.0.3
orjson
cryptography
# Async database drivers
aiomysql
aiosqlite
aiopg
asyncpg
# Sync database drivers for standard tooling around setup/teardown/migrations.
pymysql
psycopg2-binary
mysqlclient
# Testing
pytest
pytest-cov
codecov
pytest-asyncio
fastapi
flake8
flake8-black
flake8-bugbear
flake8-import-order
flake8-bandit
flake8-annotations
flake8-builtins
flake8-variables-names
flake8-cognitive-complexity
flake8-functions
flake8-expression-complexity
# types
mypy
types-ujson
types-PyMySQL
types-ipaddress
types-enum34
types-cryptography
types-orjson
types-aiofiles
types-pkg_resources
types-requests
types-toml
# Documentation
mkdocs
mkdocs-material
mkdocs-material-extensions
pydoc-markdown
# Performance testing
yappi

View File

@ -1,93 +0,0 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
from setuptools import setup
PACKAGE = "ormar"
URL = "https://github.com/collerek/ormar"
def get_version(package):
    """
    Return package version as listed in `__version__` in `__init__.py`.

    :param package: path of the package directory containing ``__init__.py``.
    :raises RuntimeError: if no ``__version__`` assignment can be found,
        instead of the opaque ``AttributeError`` raised by calling
        ``.group(1)`` on a failed ``re.search`` (which returns ``None``).
    """
    init_path = os.path.join(package, "__init__.py")
    with open(init_path) as f:
        match = re.search("__version__ = ['\"]([^'\"]+)['\"]", f.read())
    if match is None:
        raise RuntimeError("Unable to find __version__ in %s" % init_path)
    return match.group(1)
def get_long_description():
    """Return the contents of the README, used as the long description."""
    with open("README.md", encoding="utf8") as readme_file:
        contents = readme_file.read()
    return contents
def get_packages(package):
    """Return the root package and every sub-package beneath it."""
    found = []
    for current_dir, _subdirs, _files in os.walk(package):
        # A directory is a package only if it carries an __init__.py marker.
        if os.path.exists(os.path.join(current_dir, "__init__.py")):
            found.append(current_dir)
    return found
# Legacy setuptools entry point (superseded by pyproject.toml in the poetry
# migration).  Declares package metadata, runtime dependencies and optional
# extras for building/publishing the distribution.
setup(
    name=PACKAGE,
    version=get_version(PACKAGE),  # read from ormar/__init__.py
    url=URL,
    license="MIT",
    description="A simple async ORM with fastapi in mind and pydantic validation.",
    long_description=get_long_description(),  # README.md contents
    long_description_content_type="text/markdown",
    keywords=[
        "orm",
        "sqlalchemy",
        "fastapi",
        "pydantic",
        "databases",
        "async",
        "alembic",
    ],
    author="Radosław Drążkiewicz",
    author_email="collerek@gmail.com",
    packages=get_packages(PACKAGE),  # root package plus sub-packages
    # Ship the PEP 561 marker so type checkers use the inline annotations.
    package_data={PACKAGE: ["py.typed"]},
    include_package_data=True,
    zip_safe=False,
    python_requires=">=3.6",
    data_files=[("", ["LICENSE.md"])],
    install_requires=[
        "databases>=0.3.2,<0.5.3",
        "pydantic>=1.6.1,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<=1.8.2",
        "sqlalchemy>=1.3.18,<1.4.26",
        "typing_extensions>=3.7,<3.10.0.3",
    ],
    # Optional per-backend driver groups; install e.g. as ormar[postgresql].
    extras_require={
        "postgresql": ["asyncpg", "psycopg2-binary"],
        "mysql": ["aiomysql", "pymysql"],
        "sqlite": ["aiosqlite"],
        "orjson": ["orjson"],
        "crypto": ["cryptography"],
    },
    classifiers=[
        "Development Status :: 4 - Beta",
        "Environment :: Web Environment",
        "Intended Audience :: Developers",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
        "Topic :: Internet :: WWW/HTTP",
        "Framework :: AsyncIO",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3 :: Only",
    ],
)

View File

@ -25,7 +25,7 @@ class BaseMeta(ormar.ModelMeta):
default_fernet = dict( default_fernet = dict(
encrypt_secret="asd123", encrypt_backend=ormar.EncryptBackends.FERNET, encrypt_secret="asd123", encrypt_backend=ormar.EncryptBackends.FERNET
) )

View File

@ -135,7 +135,7 @@ class Project(orm.Model):
type: str = orm.String(max_length=10, default="cs") type: str = orm.String(max_length=10, default="cs")
target_branch_name: str = orm.String(max_length=100, default="master") target_branch_name: str = orm.String(max_length=100, default="master")
header: str = orm.String(max_length=250, default="") header: str = orm.String(max_length=250, default="")
jira_url: str = orm.String(max_length=500,) jira_url: str = orm.String(max_length=500)
changelog_file: str = orm.String(max_length=250, default="") changelog_file: str = orm.String(max_length=250, default="")
version_file: str = orm.String(max_length=250, default="") version_file: str = orm.String(max_length=250, default="")

View File

@ -106,13 +106,7 @@ def compare_results_include(excludable):
def test_excluding_fields_from_list(): def test_excluding_fields_from_list():
fields = [ fields = ["gearbox_type", "gears", "aircon_type", "year", "manufacturer__founded"]
"gearbox_type",
"gears",
"aircon_type",
"year",
"manufacturer__founded",
]
excludable = ExcludableItems() excludable = ExcludableItems()
excludable.build(items=fields, model_cls=Car, is_exclude=True) excludable.build(items=fields, model_cls=Car, is_exclude=True)
compare_results(excludable) compare_results(excludable)
@ -174,7 +168,7 @@ def test_nested_includes_from_dict():
fields = { fields = {
"id": ..., "id": ...,
"name": ..., "name": ...,
"manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name": ...}},}, "manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name": ...}}},
} }
excludable = ExcludableItems() excludable = ExcludableItems()
excludable.build(items=fields, model_cls=Car, is_exclude=False) excludable.build(items=fields, model_cls=Car, is_exclude=False)
@ -185,7 +179,7 @@ def test_nested_includes_from_dict_with_set():
fields = { fields = {
"id": ..., "id": ...,
"name": ..., "name": ...,
"manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name"}},}, "manufacturer": {"name": ..., "hq": {"name": ..., "nicks": {"name"}}},
} }
excludable = ExcludableItems() excludable = ExcludableItems()
excludable.build(items=fields, model_cls=Car, is_exclude=False) excludable.build(items=fields, model_cls=Car, is_exclude=False)

View File

@ -167,11 +167,7 @@ def test_excluding_fields_in_endpoints():
assert created_user.pk is not None assert created_user.pk is not None
assert created_user.password is None assert created_user.password is None
user2 = { user2 = {"email": "test@domain.com", "first_name": "John", "last_name": "Doe"}
"email": "test@domain.com",
"first_name": "John",
"last_name": "Doe",
}
response = client.post("/users/", json=user2) response = client.post("/users/", json=user2)
created_user = User(**response.json()) created_user = User(**response.json())

View File

@ -133,9 +133,7 @@ class Car2(ormar.Model):
id: int = ormar.Integer(primary_key=True) id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=50) name: str = ormar.String(max_length=50)
owner: Person = ormar.ForeignKey(Person, related_name="owned") owner: Person = ormar.ForeignKey(Person, related_name="owned")
co_owners: List[Person] = ormar.ManyToMany( co_owners: List[Person] = ormar.ManyToMany(Person, related_name="coowned")
Person, related_name="coowned",
)
created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now) created_date: datetime.datetime = ormar.DateTime(default=datetime.datetime.now)
@ -204,7 +202,7 @@ def test_field_redefining_in_concrete_models():
assert changed_field.get_alias() == "creation_date" assert changed_field.get_alias() == "creation_date"
assert any(x.name == "creation_date" for x in RedefinedField.Meta.table.columns) assert any(x.name == "creation_date" for x in RedefinedField.Meta.table.columns)
assert isinstance( assert isinstance(
RedefinedField.Meta.table.columns["creation_date"].type, sa.sql.sqltypes.String, RedefinedField.Meta.table.columns["creation_date"].type, sa.sql.sqltypes.String
) )

View File

@ -95,8 +95,7 @@ def test_field_redefining_in_second_raises_error():
) )
assert any(x.name == "creation_date" for x in RedefinedField2.Meta.table.columns) assert any(x.name == "creation_date" for x in RedefinedField2.Meta.table.columns)
assert isinstance( assert isinstance(
RedefinedField2.Meta.table.columns["creation_date"].type, RedefinedField2.Meta.table.columns["creation_date"].type, sa.sql.sqltypes.String
sa.sql.sqltypes.String,
) )

View File

@ -62,18 +62,18 @@ async def cleanup():
@pytest.mark.asyncio @pytest.mark.asyncio
async def test_creating_a_position(cleanup): async def test_creating_a_position(cleanup):
async with database: async with database:
instance = PositionOrm(name="my_pos", x=1.0, y=2.0, degrees=3.0,) instance = PositionOrm(name="my_pos", x=1.0, y=2.0, degrees=3.0)
await instance.save() await instance.save()
assert instance.saved assert instance.saved
assert instance.name == "my_pos" assert instance.name == "my_pos"
instance2 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0,) instance2 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0)
await instance2.save() await instance2.save()
assert instance2.saved assert instance2.saved
assert instance2.name is not None assert instance2.name is not None
assert len(instance2.name) == 12 assert len(instance2.name) == 12
instance3 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0,) instance3 = PositionOrmDef(x=1.0, y=2.0, degrees=3.0)
await instance3.save() await instance3.save()
assert instance3.saved assert instance3.saved
assert instance3.name is not None assert instance3.name is not None

View File

@ -117,7 +117,7 @@ def test_operator_return_proper_filter_action(method, expected, expected_value):
} }
@pytest.mark.parametrize("method, expected_direction", [("asc", ""), ("desc", "desc"),]) @pytest.mark.parametrize("method, expected_direction", [("asc", ""), ("desc", "desc")])
def test_operator_return_proper_order_action(method, expected_direction): def test_operator_return_proper_order_action(method, expected_direction):
action = getattr(Product.name, method)() action = getattr(Product.name, method)()
assert action.source_model == Product assert action.source_model == Product

View File

@ -126,7 +126,7 @@ class Country(ormar.Model):
id: int = ormar.Integer(primary_key=True) id: int = ormar.Integer(primary_key=True)
name: str = ormar.String( name: str = ormar.String(
max_length=9, choices=country_name_choices, default="Canada", max_length=9, choices=country_name_choices, default="Canada"
) )
taxed: bool = ormar.Boolean(choices=country_taxed_choices, default=True) taxed: bool = ormar.Boolean(choices=country_taxed_choices, default=True)
country_code: int = ormar.Integer( country_code: int = ormar.Integer(

View File

@ -35,7 +35,7 @@ class SecondaryModel(ormar.Model):
id: int = ormar.Integer(primary_key=True) id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100) name: str = ormar.String(max_length=100)
primary_model: PrimaryModel = ormar.ForeignKey( primary_model: PrimaryModel = ormar.ForeignKey(
PrimaryModel, related_name="secondary_models", PrimaryModel, related_name="secondary_models"
) )

View File

@ -31,7 +31,7 @@ class DataSourceTable(ormar.Model):
id: int = ormar.Integer(primary_key=True) id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=200, index=True) name: str = ormar.String(max_length=200, index=True)
source: Optional[DataSource] = ormar.ForeignKey( source: Optional[DataSource] = ormar.ForeignKey(
DataSource, name="source_id", related_name="tables", ondelete="CASCADE", DataSource, name="source_id", related_name="tables", ondelete="CASCADE"
) )
@ -43,7 +43,7 @@ class DataSourceTableColumn(ormar.Model):
name: str = ormar.String(max_length=200, index=True) name: str = ormar.String(max_length=200, index=True)
data_type: str = ormar.String(max_length=200) data_type: str = ormar.String(max_length=200)
table: Optional[DataSourceTable] = ormar.ForeignKey( table: Optional[DataSourceTable] = ormar.ForeignKey(
DataSourceTable, name="table_id", related_name="columns", ondelete="CASCADE", DataSourceTable, name="table_id", related_name="columns", ondelete="CASCADE"
) )

View File

@ -130,7 +130,7 @@ async def test_or_filters():
) )
) )
& (Book.title.startswith("The")) & (Book.title.startswith("The"))
), )
) )
.all() .all()
) )

View File

@ -95,6 +95,6 @@ async def test_add_students():
assert user.attending is not None assert user.attending is not None
assert len(user.attending) > 0 assert len(user.attending) > 0
query = Session.objects.prefetch_related(["students", "teacher",]) query = Session.objects.prefetch_related(["students", "teacher"])
sessions = await query.all() sessions = await query.all()
assert len(sessions) == 5 assert len(sessions) == 5

View File

@ -56,7 +56,7 @@ class SecondaryModel(ormar.Model):
id: int = ormar.Integer(primary_key=True) id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100) name: str = ormar.String(max_length=100)
primary_model: PrimaryModel = ormar.ForeignKey( primary_model: PrimaryModel = ormar.ForeignKey(
PrimaryModel, related_name="secondary_models", PrimaryModel, related_name="secondary_models"
) )

View File

@ -6,14 +6,7 @@ import pytest
import sqlalchemy import sqlalchemy
import ormar import ormar
from ormar import ( from ormar import post_delete, post_save, post_update, pre_delete, pre_save, pre_update
post_delete,
post_save,
post_update,
pre_delete,
pre_save,
pre_update,
)
from ormar.exceptions import SignalDefinitionError from ormar.exceptions import SignalDefinitionError
from tests.settings import DATABASE_URL from tests.settings import DATABASE_URL