Added mypy checks and some type hint changes to conform
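The recurring change in the diff below is that `Meta.model_fields` holds field *classes* rather than instances, so its annotation becomes `Dict[str, Union[Type[BaseField], Type[ForeignKeyField], Type[ManyToManyField]]]` and the helper signatures take `Type[...]` as well. A minimal, self-contained sketch of why that shape is what mypy expects for a class registry (the classes here are stand-ins, not ormar's real field types):

```python
from typing import Dict, Type


class BaseField:
    pass


class ForeignKeyField(BaseField):
    pass


# The registry stores classes, not instances, so the value type is Type[BaseField];
# any subclass *class* is accepted, while an instance would be rejected by mypy.
model_fields: Dict[str, Type[BaseField]] = {
    "id": BaseField,
    "author": ForeignKeyField,
}

model_fields["category"] = ForeignKeyField      # ok: a subclass of BaseField
# model_fields["broken"] = ForeignKeyField()    # mypy: expected Type[BaseField]
```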
@@ -28,15 +28,19 @@ class ModelMeta:
     database: databases.Database
     columns: List[sqlalchemy.Column]
     pkname: str
-    model_fields: Dict[str, Union[BaseField, ForeignKey]]
+    model_fields: Dict[
+        str, Union[Type[BaseField], Type[ForeignKeyField], Type[ManyToManyField]]
+    ]
     alias_manager: AliasManager


-def register_relation_on_build(table_name: str, field: ForeignKey) -> None:
+def register_relation_on_build(table_name: str, field: Type[ForeignKeyField]) -> None:
     alias_manager.add_relation_type(field.to.Meta.tablename, table_name)


-def register_many_to_many_relation_on_build(table_name: str, field: ManyToMany) -> None:
+def register_many_to_many_relation_on_build(
+    table_name: str, field: Type[ManyToManyField]
+) -> None:
     alias_manager.add_relation_type(field.through.Meta.tablename, table_name)
     alias_manager.add_relation_type(
         field.through.Meta.tablename, field.to.Meta.tablename

@@ -106,7 +110,7 @@ def create_pydantic_field(
 ) -> None:
     model_field.through.__fields__[field_name] = ModelField(
         name=field_name,
-        type_=Optional[model],
+        type_=model,
         model_config=model.__config__,
         required=False,
         class_validators={},

@@ -130,7 +134,7 @@ def create_and_append_m2m_fk(


 def check_pk_column_validity(
-    field_name: str, field: BaseField, pkname: str
+    field_name: str, field: BaseField, pkname: Optional[str]
 ) -> Optional[str]:
     if pkname is not None:
         raise ModelDefinitionError("Only one primary key column is allowed.")

@@ -218,6 +222,7 @@ def populate_meta_tablename_columns_and_pk(
 ) -> Type["Model"]:
     tablename = name.lower() + "s"
     new_model.Meta.tablename = new_model.Meta.tablename or tablename
+    pkname: Optional[str]

     if hasattr(new_model.Meta, "columns"):
         columns = new_model.Meta.table.columns
@@ -226,12 +231,13 @@ def populate_meta_tablename_columns_and_pk(
         pkname, columns = sqlalchemy_columns_from_model_fields(
             new_model.Meta.model_fields, new_model.Meta.tablename
         )
+
+    if pkname is None:
+        raise ModelDefinitionError("Table has to have a primary key.")
+
     new_model.Meta.columns = columns
     new_model.Meta.pkname = pkname

-    if not new_model.Meta.pkname:
-        raise ModelDefinitionError("Table has to have a primary key.")
-
     return new_model

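The two hunks above declare `pkname: Optional[str]` up front and raise before the value is stored on `Meta`, which lets mypy narrow it from `Optional[str]` to `str` past the check. A small sketch of the narrowing pattern with a made-up helper (not ormar code):

```python
from typing import Dict, Optional


def pick_pk(columns: Dict[str, bool]) -> str:
    pkname: Optional[str] = None
    for name, is_pk in columns.items():
        if is_pk:
            pkname = name
    if pkname is None:
        raise ValueError("Table has to have a primary key.")
    # After the is-None check mypy narrows pkname to plain str.
    return pkname


print(pick_pk({"id": True, "name": False}))  # -> id
```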
@@ -253,8 +259,8 @@ def get_pydantic_base_orm_config() -> Type[BaseConfig]:
     return Config


-def check_if_field_has_choices(field: BaseField) -> bool:
-    return hasattr(field, "choices") and field.choices
+def check_if_field_has_choices(field: Type[BaseField]) -> bool:
+    return hasattr(field, "choices") and bool(field.choices)


 def model_initialized_and_has_model_fields(model: Type["Model"]) -> bool:
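Wrapping the second operand in `bool(...)` is what makes the declared `-> bool` hold up: `hasattr(field, "choices") and field.choices` has the static type `Union[bool, <type of choices>]`, which mypy rejects as a `bool` return value. Illustrated with a made-up `Field` class:

```python
from typing import List


class Field:
    choices: List[str] = ["a", "b"]


def has_choices(field: Field) -> bool:
    # Without bool(...) the expression's type is Union[bool, List[str]]
    # and mypy reports an incompatible return value for the declared bool.
    return hasattr(field, "choices") and bool(field.choices)


print(has_choices(Field()))  # True
```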
@@ -287,7 +293,7 @@ def populate_choices_validators(  # noqa CCR001


 class ModelMetaclass(pydantic.main.ModelMetaclass):
-    def __new__(mcs: type, name: str, bases: Any, attrs: dict) -> type:
+    def __new__(mcs: "ModelMetaclass", name: str, bases: Any, attrs: dict) -> "ModelMetaclass":  # type: ignore
         attrs["Config"] = get_pydantic_base_orm_config()
         attrs["__name__"] = name
         attrs = extract_annotations_and_default_vals(attrs, bases)

@@ -306,7 +312,7 @@ class ModelMetaclass(pydantic.main.ModelMetaclass):
             field_name = new_model.Meta.pkname
             field = Integer(name=field_name, primary_key=True)
             attrs["__annotations__"][field_name] = field
-            populate_default_pydantic_field_value(field, field_name, attrs)
+            populate_default_pydantic_field_value(field, field_name, attrs)  # type: ignore

         new_model = super().__new__(  # type: ignore
             mcs, name, bases, attrs
@@ -1,5 +1,5 @@
 import itertools
-from typing import Any, List, Tuple, Union
+from typing import Any, List, Dict, Optional

 import sqlalchemy
 from databases.backends.postgres import Record

@@ -9,8 +9,8 @@ from ormar.fields.many_to_many import ManyToManyField
 from ormar.models import NewBaseModel  # noqa I100


-def group_related_list(list_: List) -> dict:
-    test_dict = dict()
+def group_related_list(list_: List) -> Dict:
+    test_dict: Dict[str, Any] = dict()
     grouped = itertools.groupby(list_, key=lambda x: x.split("__")[0])
     for key, group in grouped:
         group_list = list(group)
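The `test_dict: Dict[str, Any] = dict()` annotation addresses mypy's "Need type annotation" error: the element types of an empty container cannot be inferred from the literal alone. For example:

```python
from typing import Any, Dict

# A bare `grouped = {}` triggers "Need type annotation for 'grouped'" because
# mypy cannot infer key and value types from an empty dict literal.
grouped: Dict[str, Any] = {}
grouped["categories"] = ["news", "sport"]
grouped["owner"] = {"pk": 1}
print(grouped)
```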
@@ -29,14 +29,14 @@ class Model(NewBaseModel):

     @classmethod
     def from_row(
-        cls,
-        row: sqlalchemy.engine.ResultProxy,
-        select_related: List = None,
-        related_models: Any = None,
-        previous_table: str = None,
-    ) -> Union["Model", Tuple["Model", dict]]:
+        cls,
+        row: sqlalchemy.engine.ResultProxy,
+        select_related: List = None,
+        related_models: Any = None,
+        previous_table: str = None,
+    ) -> Optional["Model"]:

-        item = {}
+        item: Dict[str, Any] = {}
         select_related = select_related or []
         related_models = related_models or []
         if select_related:

@@ -44,17 +44,20 @@ class Model(NewBaseModel):

         # breakpoint()
         if (
-            previous_table
-            and previous_table in cls.Meta.model_fields
-            and issubclass(cls.Meta.model_fields[previous_table], ManyToManyField)
+            previous_table
+            and previous_table in cls.Meta.model_fields
+            and issubclass(cls.Meta.model_fields[previous_table], ManyToManyField)
         ):
             previous_table = cls.Meta.model_fields[
                 previous_table
             ].through.Meta.tablename

-        table_prefix = cls.Meta.alias_manager.resolve_relation_join(
-            previous_table, cls.Meta.table.name
-        )
+        if previous_table:
+            table_prefix = cls.Meta.alias_manager.resolve_relation_join(
+                previous_table, cls.Meta.table.name
+            )
+        else:
+            table_prefix = ''
+        previous_table = cls.Meta.table.name

         item = cls.populate_nested_models_from_row(

@@ -67,11 +70,11 @@ class Model(NewBaseModel):

     @classmethod
     def populate_nested_models_from_row(
-        cls,
-        item: dict,
-        row: sqlalchemy.engine.ResultProxy,
-        related_models: Any,
-        previous_table: sqlalchemy.Table,
+        cls,
+        item: dict,
+        row: sqlalchemy.engine.ResultProxy,
+        related_models: Any,
+        previous_table: sqlalchemy.Table,
     ) -> dict:
         for related in related_models:
             if isinstance(related_models, dict) and related_models[related]:

@@ -90,7 +93,7 @@ class Model(NewBaseModel):

     @classmethod
     def extract_prefixed_table_columns(  # noqa CCR001
-        cls, item: dict, row: sqlalchemy.engine.result.ResultProxy, table_prefix: str
+        cls, item: dict, row: sqlalchemy.engine.result.ResultProxy, table_prefix: str
     ) -> dict:
         for column in cls.Meta.table.columns:
             if column.name not in item:
@@ -106,7 +109,7 @@ class Model(NewBaseModel):
     async def save(self) -> "Model":
         self_fields = self._extract_model_db_fields()

-        if not self.pk and self.Meta.model_fields.get(self.Meta.pkname).autoincrement:
+        if not self.pk and self.Meta.model_fields[self.Meta.pkname].autoincrement:
             self_fields.pop(self.Meta.pkname, None)
         self_fields = self.objects._populate_default_values(self_fields)
         expr = self.Meta.table.insert()
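Indexing `Meta.model_fields[...]` instead of calling `.get(...)` avoids an `Optional` result: `.get()` is typed as returning `Optional[value]`, so accessing an attribute on it makes mypy report `Item "None" of "Optional[...]" has no attribute ...`. The final hunk of this commit makes the same change. A tiny illustration:

```python
from typing import Dict

registry: Dict[str, int] = {"id": 1}

# registry.get("id") is Optional[int]; calling .bit_length() on it directly
# fails type checking, while indexing keeps the result a plain int.
print(registry["id"].bit_length())  # 1
```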
@@ -138,5 +141,7 @@ class Model(NewBaseModel):
     async def load(self) -> "Model":
         expr = self.Meta.table.select().where(self.pk_column == self.pk)
         row = await self.Meta.database.fetch_one(expr)
+        if not row:  # pragma nocover
+            raise ValueError('Instance was deleted from database and cannot be refreshed')
         self.from_dict(dict(row))
         return self
@@ -1,5 +1,5 @@
 import inspect
-from typing import List, Optional, Set, TYPE_CHECKING, Type, TypeVar, Union
+from typing import List, Optional, Set, TYPE_CHECKING, Type, TypeVar, Union, Dict

 import ormar
 from ormar.exceptions import RelationshipInstanceError

@@ -9,6 +9,7 @@ from ormar.models.metaclass import ModelMeta

 if TYPE_CHECKING:  # pragma no cover
     from ormar import Model
+    from ormar.models import NewBaseModel

 Field = TypeVar("Field", bound=BaseField)


@@ -17,10 +18,10 @@ class ModelTableProxy:
     if TYPE_CHECKING:  # pragma no cover
         Meta: ModelMeta

-    def dict():  # noqa A003
+    def dict(self):  # noqa A003
         raise NotImplementedError  # pragma no cover

-    def _extract_own_model_fields(self) -> dict:
+    def _extract_own_model_fields(self) -> Dict:
         related_names = self.extract_related_names()
         self_fields = {k: v for k, v in self.dict().items() if k not in related_names}
         return self_fields

@@ -34,7 +35,7 @@ class ModelTableProxy:
         return self_fields

     @classmethod
-    def substitute_models_with_pks(cls, model_dict: dict) -> dict:
+    def substitute_models_with_pks(cls, model_dict: Dict) -> Dict:
         for field in cls.extract_related_names():
             field_value = model_dict.get(field, None)
             if field_value is not None:

@@ -80,7 +81,7 @@ class ModelTableProxy:
             related_names.add(name)
         return related_names

-    def _extract_model_db_fields(self) -> dict:
+    def _extract_model_db_fields(self) -> Dict:
         self_fields = self._extract_own_model_fields()
         self_fields = {
             k: v for k, v in self_fields.items() if k in self.Meta.table.columns

@@ -92,7 +93,9 @@ class ModelTableProxy:
         return self_fields

     @staticmethod
-    def resolve_relation_name(item: "Model", related: "Model") -> Optional[str]:
+    def resolve_relation_name(
+        item: Union["NewBaseModel", Type["NewBaseModel"]], related: Union["NewBaseModel", Type["NewBaseModel"]]
+    ) -> str:
         for name, field in item.Meta.model_fields.items():
             if issubclass(field, ForeignKeyField):
                 # fastapi is creating clones of response model

@@ -100,11 +103,14 @@ class ModelTableProxy:
                 # so we need to compare Meta too as this one is copied as is
                 if field.to == related.__class__ or field.to.Meta == related.Meta:
                     return name
+        raise ValueError(
+            f"No relation between {item.get_name()} and {related.get_name()}"
+        )  # pragma nocover

     @staticmethod
     def resolve_relation_field(
         item: Union["Model", Type["Model"]], related: Union["Model", Type["Model"]]
-    ) -> Type[Field]:
+    ) -> Union[Type[BaseField], Type[ForeignKeyField]]:
         name = ModelTableProxy.resolve_relation_name(item, related)
         to_field = item.Meta.model_fields.get(name)
         if not to_field:  # pragma no cover

@@ -116,7 +122,7 @@ class ModelTableProxy:

     @classmethod
     def merge_instances_list(cls, result_rows: List["Model"]) -> List["Model"]:
-        merged_rows = []
+        merged_rows: List["Model"] = []
         for index, model in enumerate(result_rows):
             if index > 0 and model.pk == merged_rows[-1].pk:
                 merged_rows[-1] = cls.merge_two_instances(model, merged_rows[-1])
@@ -3,13 +3,13 @@ import uuid
 from typing import (
     AbstractSet,
     Any,
     Callable,
     Dict,
     List,
     Mapping,
     Optional,
     TYPE_CHECKING,
     Type,
     TypeVar,
     Union,
 )

@@ -39,7 +39,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
     __slots__ = ("_orm_id", "_orm_saved", "_orm")

     if TYPE_CHECKING:  # pragma no cover
-        __model_fields__: Dict[str, TypeVar[BaseField]]
+        __model_fields__: Dict[str, Type[BaseField]]
         __table__: sqlalchemy.Table
         __fields__: Dict[str, pydantic.fields.ModelField]
         __pydantic_model__: Type[BaseModel]

@@ -84,7 +84,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
             for k, v in kwargs.items()
         }

-        values, fields_set, validation_error = pydantic.validate_model(self, kwargs)
+        values, fields_set, validation_error = pydantic.validate_model(self, kwargs)  # type: ignore
         if validation_error and not pk_only:
             raise validation_error

@@ -134,13 +134,14 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
     ) -> Optional[Union["Model", List["Model"]]]:
         if item in self._orm:
             return self._orm.get(item)
+        return None

-    def __eq__(self, other: "Model") -> bool:
+    def __eq__(self, other: object) -> bool:
         if isinstance(other, NewBaseModel):
             return self.__same__(other)
         return super().__eq__(other)  # pragma no cover

-    def __same__(self, other: "Model") -> bool:
+    def __same__(self, other: "NewBaseModel") -> bool:
         return (
             self._orm_id == other._orm_id
             or self.dict() == other.dict()
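`__eq__` now accepts `object` because mypy checks the override against `object.__eq__`, and a narrower parameter type is flagged as an incompatible override; the body then narrows with `isinstance`, exactly as the hunk above does. A minimal sketch with a stand-in class:

```python
class FakeModel:
    def __init__(self, pk: int) -> None:
        self.pk = pk

    def __eq__(self, other: object) -> bool:
        # The parameter must be 'object' to match object.__eq__;
        # isinstance narrows it before the attributes are compared.
        if isinstance(other, FakeModel):
            return self.pk == other.pk
        return super().__eq__(other)


print(FakeModel(1) == FakeModel(1), FakeModel(1) == "x")  # True False
```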
@@ -205,19 +206,19 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
                 dict_instance[field] = None
         return dict_instance

-    def from_dict(self, value_dict: Dict) -> "Model":
+    def from_dict(self, value_dict: Dict) -> "NewBaseModel":
         for key, value in value_dict.items():
             setattr(self, key, value)
         return self

-    def _convert_json(self, column_name: str, value: Any, op: str) -> Union[str, dict]:
+    def _convert_json(self, column_name: str, value: Any, op: str) -> Union[str, Dict]:
         if not self._is_conversion_to_json_needed(column_name):
             return value

         condition = (
             isinstance(value, str) if op == "loads" else not isinstance(value, str)
         )
-        operand = json.loads if op == "loads" else json.dumps
+        operand: Callable[[Any], Any] = json.loads if op == "loads" else json.dumps

         if condition:
             try:
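The new `Callable[[Any], Any]` annotation on `operand` is there because `json.loads` and `json.dumps` have different signatures, so mypy cannot give the conditional expression a single precise callable type on its own; both functions are, however, assignable to the declared `Callable`. For example:

```python
import json
from typing import Any, Callable

op = "loads"

# Annotating the target gives both branches a common callable type,
# so the later call type-checks regardless of which function was chosen.
operand: Callable[[Any], Any] = json.loads if op == "loads" else json.dumps
print(operand('{"a": 1}'))  # {'a': 1}
```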
@@ -227,4 +228,4 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
             return value

     def _is_conversion_to_json_needed(self, column_name: str) -> bool:
-        return self.Meta.model_fields.get(column_name).__type__ == pydantic.Json
+        return self.Meta.model_fields[column_name].__type__ == pydantic.Json