Merge pull request #183 from collerek/large_binary

Add large binary, support for native pydantic fields, examples in openapi
This commit is contained in:
collerek
2021-05-02 15:11:46 +02:00
committed by GitHub
29 changed files with 790 additions and 110 deletions

View File

@ -53,6 +53,7 @@ from ormar.fields import (
ForeignKeyField,
Integer,
JSON,
LargeBinary,
ManyToMany,
ManyToManyField,
String,
@ -75,7 +76,7 @@ class UndefinedType: # pragma no cover
Undefined = UndefinedType()
__version__ = "0.10.5"
__version__ = "0.10.6"
__all__ = [
"Integer",
"BigInteger",
@ -124,4 +125,5 @@ __all__ = [
"EncryptBackends",
"ENCODERS_MAP",
"DECODERS_MAP",
"LargeBinary",
]

View File

@ -16,6 +16,7 @@ from ormar.fields.model_fields import (
Float,
Integer,
JSON,
LargeBinary,
String,
Text,
Time,
@ -50,4 +51,5 @@ __all__ = [
"EncryptBackend",
"DECODERS_MAP",
"ENCODERS_MAP",
"LargeBinary",
]

View File

@ -1,3 +1,4 @@
import warnings
from typing import Any, Dict, List, Optional, TYPE_CHECKING, Type, Union
import sqlalchemy
@ -30,6 +31,7 @@ class BaseField(FieldInfo):
def __init__(self, **kwargs: Any) -> None:
self.__type__: type = kwargs.pop("__type__", None)
self.__sample__: type = kwargs.pop("__sample__", None)
self.related_name = kwargs.pop("related_name", None)
self.column_type: sqlalchemy.Column = kwargs.pop("column_type", None)
@ -43,6 +45,14 @@ class BaseField(FieldInfo):
self.index: bool = kwargs.pop("index", False)
self.unique: bool = kwargs.pop("unique", False)
self.pydantic_only: bool = kwargs.pop("pydantic_only", False)
if self.pydantic_only:
warnings.warn(
"Parameter `pydantic_only` is deprecated and will "
"be removed in one of the next releases.\n You can declare "
"pydantic fields in a normal way. \n Check documentation: "
"https://collerek.github.io/ormar/fields/pydantic-fields",
DeprecationWarning,
)
self.choices: typing.Sequence = kwargs.pop("choices", False)
self.virtual: bool = kwargs.pop(

View File

@ -80,7 +80,7 @@ def create_dummy_model(
:rtype: pydantic.BaseModel
"""
alias = (
"".join(choices(string.ascii_uppercase, k=2)) + uuid.uuid4().hex[:4]
"".join(choices(string.ascii_uppercase, k=6)) # + uuid.uuid4().hex[:4]
).lower()
fields = {f"{pk_field.name}": (pk_field.__type__, None)}

View File

@ -62,6 +62,7 @@ class ModelFieldFactory:
_bases: Any = (BaseField,)
_type: Any = None
_sample: Any = None
def __new__(cls, *args: Any, **kwargs: Any) -> BaseField: # type: ignore
cls.validate(**kwargs)
@ -80,6 +81,7 @@ class ModelFieldFactory:
namespace = dict(
__type__=cls._type,
__sample__=cls._sample,
alias=kwargs.pop("name", None),
name=None,
primary_key=primary_key,
@ -129,6 +131,7 @@ class String(ModelFieldFactory, str):
"""
_type = str
_sample = "string"
def __new__( # type: ignore # noqa CFQ002
cls,
@ -185,6 +188,7 @@ class Integer(ModelFieldFactory, int):
"""
_type = int
_sample = 0
def __new__( # type: ignore
cls,
@ -232,6 +236,7 @@ class Text(ModelFieldFactory, str):
"""
_type = str
_sample = "text"
def __new__( # type: ignore
cls, *, allow_blank: bool = True, strip_whitespace: bool = False, **kwargs: Any
@ -267,6 +272,7 @@ class Float(ModelFieldFactory, float):
"""
_type = float
_sample = 0.0
def __new__( # type: ignore
cls,
@ -316,6 +322,7 @@ else:
"""
_type = bool
_sample = True
@classmethod
def get_column_type(cls, **kwargs: Any) -> Any:
@ -337,6 +344,7 @@ class DateTime(ModelFieldFactory, datetime.datetime):
"""
_type = datetime.datetime
_sample = "datetime"
@classmethod
def get_column_type(cls, **kwargs: Any) -> Any:
@ -358,6 +366,7 @@ class Date(ModelFieldFactory, datetime.date):
"""
_type = datetime.date
_sample = "date"
@classmethod
def get_column_type(cls, **kwargs: Any) -> Any:
@ -379,6 +388,7 @@ class Time(ModelFieldFactory, datetime.time):
"""
_type = datetime.time
_sample = "time"
@classmethod
def get_column_type(cls, **kwargs: Any) -> Any:
@ -400,6 +410,7 @@ class JSON(ModelFieldFactory, pydantic.Json):
"""
_type = pydantic.Json
_sample = '{"json": "json"}'
@classmethod
def get_column_type(cls, **kwargs: Any) -> Any:
@ -415,12 +426,61 @@ class JSON(ModelFieldFactory, pydantic.Json):
return sqlalchemy.JSON()
class LargeBinary(ModelFieldFactory, bytes):
"""
LargeBinary field factory that constructs Field classes and populates their values.

Maps to ``sqlalchemy.LargeBinary``; ``max_length`` is required and must be
a positive integer (enforced by ``validate``).
"""
_type = bytes
# NOTE(review): the openapi sample is the literal string "bytes", not a bytes
# object — presumably intentional for JSON schema examples; confirm.
_sample = "bytes"
def __new__( # type: ignore # noqa CFQ002
cls, *, max_length: int = None, **kwargs: Any
) -> BaseField: # type: ignore
# Merge the explicit keyword arguments captured via locals() (max_length)
# into kwargs before delegating to the factory's __new__.
kwargs = {
**kwargs,
**{
k: v
for k, v in locals().items()
if k not in ["cls", "__class__", "kwargs"]
},
}
return super().__new__(cls, **kwargs)
@classmethod
def get_column_type(cls, **kwargs: Any) -> Any:
"""
Return proper type of db column for given field type.
Accepts required and optional parameters that each column type accepts.

:param kwargs: key, value pairs of sqlalchemy options
:type kwargs: Any
:return: initialized column with proper options
:rtype: sqlalchemy Column
"""
return sqlalchemy.LargeBinary(length=kwargs.get("max_length"))
@classmethod
def validate(cls, **kwargs: Any) -> None:
"""
Used to validate if all required parameters on a given field type are set.

:param kwargs: all params passed during construction
:type kwargs: Any
:raises ModelDefinitionError: if max_length is missing, zero or negative
"""
max_length = kwargs.get("max_length", None)
if max_length is None or max_length <= 0:
raise ModelDefinitionError(
"Parameter max_length is required for field LargeBinary"
)
class BigInteger(Integer, int):
"""
BigInteger field factory that construct Field classes and populated their values.
"""
_type = int
_sample = 0
def __new__( # type: ignore
cls,
@ -468,6 +528,7 @@ class Decimal(ModelFieldFactory, decimal.Decimal):
"""
_type = decimal.Decimal
_sample = 0.0
def __new__( # type: ignore # noqa CFQ002
cls,
@ -540,6 +601,7 @@ class UUID(ModelFieldFactory, uuid.UUID):
"""
_type = uuid.UUID
_sample = "uuid"
def __new__( # type: ignore # noqa CFQ002
cls, *, uuid_format: str = "hex", **kwargs: Any

View File

@ -3,6 +3,7 @@ import itertools
import sqlite3
from typing import Any, Dict, List, TYPE_CHECKING, Tuple, Type
import pydantic
from pydantic.typing import ForwardRef
import ormar # noqa: I100
from ormar.models.helpers.pydantic import populate_pydantic_default_values
@ -61,6 +62,12 @@ def populate_default_options_values(
else:
new_model.Meta.requires_ref_update = False
new_model._json_fields = {
name
for name, field in new_model.Meta.model_fields.items()
if field.__type__ == pydantic.Json
}
class Connection(sqlite3.Connection):
def __init__(self, *args: Any, **kwargs: Any) -> None: # pragma: no cover

View File

@ -98,6 +98,7 @@ def get_pydantic_base_orm_config() -> Type[pydantic.BaseConfig]:
class Config(pydantic.BaseConfig):
orm_mode = True
validate_assignment = True
return Config

View File

@ -1,8 +1,9 @@
import datetime
import decimal
import numbers
import uuid
from enum import Enum
from typing import Any, Dict, List, TYPE_CHECKING, Tuple, Type
from typing import Any, Dict, List, Set, TYPE_CHECKING, Tuple, Type, Union
try:
import orjson as json
@ -10,11 +11,13 @@ except ImportError: # pragma: no cover
import json # type: ignore
import pydantic
from pydantic.fields import SHAPE_LIST
from pydantic.main import SchemaExtraCallable
import ormar # noqa: I100, I202
from ormar.fields import BaseField
from ormar.models.helpers.models import meta_field_not_set
from ormar.queryset.utils import translate_list_to_dict
if TYPE_CHECKING: # pragma no cover
from ormar import Model
@ -73,6 +76,8 @@ def convert_choices_if_needed( # noqa: CCR001
else value
)
choices = [round(float(o), precision) for o in choices]
elif field.__type__ == bytes:
value = value if isinstance(value, bytes) else value.encode("utf-8")
return value, choices
@ -114,12 +119,109 @@ def choices_validator(cls: Type["Model"], values: Dict[str, Any]) -> Dict[str, A
return values
def generate_model_example(model: Type["Model"], relation_map: Dict = None) -> Dict:
"""
Generates example to be included in schema in fastapi.

:param model: ormar.Model
:type model: Type["Model"]
:param relation_map: dict with relations to follow
:type relation_map: Optional[Dict]
:return: dict of field names to sample values for the openapi example
:rtype: Dict[str, Any]
"""
example: Dict[str, Any] = dict()
# default: follow the model's full relation tree
relation_map = (
relation_map
if relation_map is not None
else translate_list_to_dict(model._iterate_related_models())
)
for name, field in model.Meta.model_fields.items():
if not field.is_relation:
# plain column fields expose a static sample on the field class
example[name] = field.__sample__
elif isinstance(relation_map, dict) and name in relation_map:
# relations are expanded only while present in the relation map
# (presumably bounds recursion on circular relations — confirm)
example[name] = get_nested_model_example(
name=name, field=field, relation_map=relation_map
)
# pydantic-only fields are not in Meta.model_fields — add their samples too
to_exclude = {name for name in model.Meta.model_fields}
pydantic_repr = generate_pydantic_example(pydantic_model=model, exclude=to_exclude)
example.update(pydantic_repr)
return example
def get_nested_model_example(
name: str, field: "BaseField", relation_map: Dict
) -> Union[List, Dict]:
"""
Gets representation of nested model.

:param name: name of the field to follow
:type name: str
:param field: ormar field
:type field: BaseField
:param relation_map: dict with relation map
:type relation_map: Dict
:return: nested model or list of nested model repr
:rtype: Union[List, Dict]
"""
value = generate_model_example(field.to, relation_map=relation_map.get(name, {}))
# multi (m2m) and virtual (reverse) relations hold lists of models,
# so the sample is wrapped in a one-element list
new_value: Union[List, Dict] = [value] if field.is_multi or field.virtual else value
return new_value
def generate_pydantic_example(
pydantic_model: Type[pydantic.BaseModel], exclude: Set = None
) -> Dict:
"""
Generates dict with example.

:param pydantic_model: model to parse
:type pydantic_model: Type[pydantic.BaseModel]
:param exclude: set of field names to exclude
:type exclude: Optional[Set]
:return: dict with fields and sample values
:rtype: Dict
"""
example: Dict[str, Any] = dict()
exclude = exclude or set()
name_to_check = [name for name in pydantic_model.__fields__ if name not in exclude]
for name in name_to_check:
field = pydantic_model.__fields__[name]
type_ = field.type_
if field.shape == SHAPE_LIST:
# list-shaped fields get a one-element list of the sample value
example[name] = [get_pydantic_example_repr(type_)]
else:
example[name] = get_pydantic_example_repr(type_)
return example
def get_pydantic_example_repr(type_: Any) -> Any:
    """
    Produce a sample value for one pydantic field type, used when
    building the example dict for the openapi schema.

    :param type_: type of pydantic field
    :type type_: Any
    :return: representation to include in example
    :rtype: Any
    """
    # numeric types (ints, floats, Decimal, bool, ...) sample as zero
    if issubclass(type_, (numbers.Number, decimal.Decimal)):
        return 0
    # nested pydantic models recurse into their own example dict
    if issubclass(type_, pydantic.BaseModel):
        return generate_pydantic_example(pydantic_model=type_)
    # everything else falls back to a generic string sample
    return "string"
def construct_modify_schema_function(fields_with_choices: List) -> SchemaExtraCallable:
"""
Modifies the schema to include fields with choices validator.
Those fields will be displayed in schema as Enum types with available choices
values listed next to them.
Note that schema extra has to be a function, otherwise it's called too soon
before all the relations are expanded.
:param fields_with_choices: list of fields with choices validation
:type fields_with_choices: List
:return: callable that will be run by pydantic to modify the schema
@ -131,6 +233,28 @@ def construct_modify_schema_function(fields_with_choices: List) -> SchemaExtraCa
if field_id in fields_with_choices:
prop["enum"] = list(model.Meta.model_fields[field_id].choices)
prop["description"] = prop.get("description", "") + "An enumeration."
schema["example"] = generate_model_example(model=model)
if "Main base class of ormar Model." in schema.get("description", ""):
schema["description"] = f"{model.__name__}"
return staticmethod(schema_extra) # type: ignore
def construct_schema_function_without_choices() -> SchemaExtraCallable:
"""
Modifies model example and description if needed.

Note that schema extra has to be a function, otherwise it's called too soon
before all the relations are expanded.

:return: callable that will be run by pydantic to modify the schema
:rtype: Callable
"""
def schema_extra(schema: Dict[str, Any], model: Type["Model"]) -> None:
# attach a generated sample payload to the openapi schema
schema["example"] = generate_model_example(model=model)
# replace ormar's generic base-class docstring with the model's name
if "Main base class of ormar Model." in schema.get("description", ""):
schema["description"] = f"{model.__name__}"
return staticmethod(schema_extra) # type: ignore
@ -160,3 +284,5 @@ def populate_choices_validators(model: Type["Model"]) -> None: # noqa CCR001
model.Config.schema_extra = construct_modify_schema_function(
fields_with_choices=fields_with_choices
)
else:
model.Config.schema_extra = construct_schema_function_without_choices()

View File

@ -94,6 +94,7 @@ def add_cached_properties(new_model: Type["Model"]) -> None:
new_model._related_fields = None
new_model._pydantic_fields = {name for name in new_model.__fields__}
new_model._choices_fields = set()
new_model._json_fields = set()
def add_property_fields(new_model: Type["Model"], attrs: Dict) -> None: # noqa: CCR001

View File

@ -48,7 +48,7 @@ class RelationMixin:
:return: list of related fields
:rtype: List
"""
if isinstance(cls._related_fields, List):
if cls._related_fields is not None:
return cls._related_fields
related_fields = []
@ -66,7 +66,7 @@ class RelationMixin:
:return: set of related through fields names
:rtype: Set
"""
if isinstance(cls._through_names, Set):
if cls._through_names is not None:
return cls._through_names
related_names = set()
@ -86,7 +86,7 @@ class RelationMixin:
:return: set of related fields names
:rtype: Set
"""
if isinstance(cls._related_names, Set):
if cls._related_names is not None:
return cls._related_names
related_names = set()

View File

@ -275,12 +275,12 @@ class SavePrepareMixin(RelationMixin, AliasMixin):
:rtype: int
"""
for field in fields_list:
value = getattr(self, field.name) or []
if not isinstance(value, list):
value = [value]
for val in value:
values = getattr(self, field.name) or []
if not isinstance(values, list):
values = [values]
for value in values:
if follow:
update_count = await val.save_related(
update_count = await value.save_related(
follow=follow,
save_all=save_all,
relation_map=self._skip_ellipsis( # type: ignore
@ -291,8 +291,8 @@ class SavePrepareMixin(RelationMixin, AliasMixin):
relation_field=field,
)
else:
update_count = await val._upsert_model(
instance=val,
update_count = await value._upsert_model(
instance=value,
save_all=save_all,
previous_model=self,
relation_field=field,

View File

@ -1,5 +1,4 @@
import sys
import uuid
from typing import (
AbstractSet,
Any,
@ -12,6 +11,7 @@ from typing import (
Sequence,
Set,
TYPE_CHECKING,
Tuple,
Type,
Union,
cast,
@ -86,6 +86,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
_choices_fields: Optional[Set]
_pydantic_fields: Set
_quick_access_fields: Set
_json_fields: Set
Meta: ModelMeta
# noinspection PyMissingConstructor
@ -123,53 +124,12 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
:type kwargs: Any
"""
self._verify_model_can_be_initialized()
object.__setattr__(self, "_orm_id", uuid.uuid4().hex)
object.__setattr__(self, "_orm_saved", False)
object.__setattr__(self, "_pk_column", None)
object.__setattr__(
self,
"_orm",
RelationsManager(
related_fields=self.extract_related_fields(), owner=cast("Model", self),
),
)
self._initialize_internal_attributes()
pk_only = kwargs.pop("__pk_only__", False)
object.__setattr__(self, "__pk_only__", pk_only)
excluded: Set[str] = kwargs.pop("__excluded__", set())
if "pk" in kwargs:
kwargs[self.Meta.pkname] = kwargs.pop("pk")
# build the models to set them and validate but don't register
# also remove property fields values from validation
try:
new_kwargs: Dict[str, Any] = {
k: self._convert_json(
k,
self.Meta.model_fields[k].expand_relationship(
v, self, to_register=False,
),
"dumps",
)
for k, v in kwargs.items()
if k not in object.__getattribute__(self, "Meta").property_fields
}
except KeyError as e:
raise ModelError(
f"Unknown field '{e.args[0]}' for model {self.get_name(lower=False)}"
)
# explicitly set None to excluded fields
# as pydantic populates them with default if set
for field_to_nullify in excluded:
new_kwargs[field_to_nullify] = None
# extract through fields
through_tmp_dict = dict()
for field_name in self.extract_through_names():
through_tmp_dict[field_name] = new_kwargs.pop(field_name, None)
new_kwargs, through_tmp_dict = self._process_kwargs(kwargs)
values, fields_set, validation_error = pydantic.validate_model(
self, new_kwargs # type: ignore
@ -182,10 +142,10 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
# add back through fields
new_kwargs.update(through_tmp_dict)
model_fields = object.__getattribute__(self, "Meta").model_fields
# register the columns models after initialization
for related in self.extract_related_names().union(self.extract_through_names()):
self.Meta.model_fields[related].expand_relationship(
model_fields[related].expand_relationship(
new_kwargs.get(related), self, to_register=True,
)
@ -243,7 +203,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
else:
if name in object.__getattribute__(self, "_choices_fields"):
validate_choices(field=self.Meta.model_fields[name], value=value)
super().__setattr__(name, value)
super().__setattr__(name, self._convert_json(name, value, op="dumps"))
self.set_save_status(False)
def __getattribute__(self, item: str) -> Any: # noqa: CCR001
@ -306,15 +266,89 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
:return: None
:rtype: None
"""
if self.Meta.abstract:
if object.__getattribute__(self, "Meta").abstract:
raise ModelError(f"You cannot initialize abstract model {self.get_name()}")
if self.Meta.requires_ref_update:
if object.__getattribute__(self, "Meta").requires_ref_update:
raise ModelError(
f"Model {self.get_name()} has not updated "
f"ForwardRefs. \nBefore using the model you "
f"need to call update_forward_refs()."
)
def _process_kwargs(self, kwargs: Dict) -> Tuple[Dict, Dict]:
"""
Initializes nested models.

Removes property_fields.
Checks if field is in the model fields or pydantic fields.
Nullifies fields that should be excluded.
Extracts through models from kwargs into temporary dict.

:param kwargs: passed to init keyword arguments
:type kwargs: Dict
:return: modified kwargs and the extracted through-fields dict
:rtype: Tuple[Dict, Dict]
"""
# object.__getattribute__ bypasses the model's custom attribute access
# machinery, which is not safe to use mid-initialization
meta = object.__getattribute__(self, "Meta")
property_fields = meta.property_fields
model_fields = meta.model_fields
pydantic_fields = object.__getattribute__(self, "__fields__")
# remove property fields
for prop_filed in property_fields:
kwargs.pop(prop_filed, None)
excluded: Set[str] = kwargs.pop("__excluded__", set())
if "pk" in kwargs:
kwargs[meta.pkname] = kwargs.pop("pk")
# extract through fields
through_tmp_dict = dict()
for field_name in self.extract_through_names():
through_tmp_dict[field_name] = kwargs.pop(field_name, None)
# NOTE: for a key in neither model_fields nor pydantic_fields the
# model_fields[k] lookup deliberately raises KeyError, which is
# converted to ModelError ("Unknown field ...") below
try:
new_kwargs: Dict[str, Any] = {
k: self._convert_json(
k,
model_fields[k].expand_relationship(v, self, to_register=False,)
if k in model_fields
else (v if k in pydantic_fields else model_fields[k]),
"dumps",
)
for k, v in kwargs.items()
}
except KeyError as e:
raise ModelError(
f"Unknown field '{e.args[0]}' for model {self.get_name(lower=False)}"
)
# explicitly set None to excluded fields
# as pydantic populates them with default if set
for field_to_nullify in excluded:
new_kwargs[field_to_nullify] = None
return new_kwargs, through_tmp_dict
def _initialize_internal_attributes(self) -> None:
"""
Initializes internal attributes during __init__()

Uses object.__setattr__ to bypass the model's custom __setattr__ logic.

:rtype: None
"""
# NOTE(review): _orm_id assignment disabled in this change — equality
# presumably now relies on pk comparison instead; confirm before removing.
# object.__setattr__(self, "_orm_id", uuid.uuid4().hex)
object.__setattr__(self, "_orm_saved", False)
object.__setattr__(self, "_pk_column", None)
object.__setattr__(
self,
"_orm",
RelationsManager(
related_fields=self.extract_related_fields(), owner=cast("Model", self),
),
)
def _extract_related_model_instead_of_field(
self, item: str
) -> Optional[Union["Model", Sequence["Model"]]]:
@ -355,8 +389,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
:rtype: bool
"""
return (
self._orm_id == other._orm_id
or (self.pk == other.pk and self.pk is not None)
# self._orm_id == other._orm_id
(self.pk == other.pk and self.pk is not None)
or (
(self.pk is None and other.pk is None)
and {
@ -740,7 +774,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
:return: converted value if needed, else original value
:rtype: Any
"""
if not self._is_conversion_to_json_needed(column_name):
if column_name not in object.__getattribute__(self, "_json_fields"):
return value
condition = (
@ -757,20 +791,6 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
pass
return value.decode("utf-8") if isinstance(value, bytes) else value
def _is_conversion_to_json_needed(self, column_name: str) -> bool:
"""
Checks if given column name is related to JSON field.
:param column_name: name of the field
:type column_name: str
:return: result of the check
:rtype: bool
"""
return (
column_name in self.Meta.model_fields
and self.Meta.model_fields[column_name].__type__ == pydantic.Json
)
def _extract_own_model_fields(self) -> Dict:
"""
Returns a dictionary with field names and values for fields that are not