refactor into descriptors, cleanup, docs update
@ -138,6 +138,35 @@ LargeBinary length is used in some backend (i.e. mysql) to determine the size of
 in other backends it's simply ignored yet in ormar it's always required. It should be max
 size of the file/bytes in bytes.

+`LargeBinary` also has an optional `represent_as_base64_str: bool = False` flag.
+When set to `True`, `ormar` will auto-convert the bytes value to a base64 encoded string on access,
+and you can also set the value by passing a base64 encoded string.
+
+That way you can e.g. set the value through the API, even if the value is not `utf-8` compatible and would otherwise fail during json conversion.
+
+```python
+import base64
+... # other imports skipped for brevity
+
+class LargeBinaryStr(ormar.Model):
+    class Meta:
+        tablename = "my_str_blobs"
+        metadata = metadata
+        database = database
+
+    id: int = ormar.Integer(primary_key=True)
+    test_binary: str = ormar.LargeBinary(
+        max_length=100000, represent_as_base64_str=True
+    )
+
+# set a non utf-8 compliant value - note this can be passed by the api (i.e. fastapi) in json
+item = LargeBinaryStr(test_binary=base64.b64encode(b"\xc3\x28").decode())
+
+assert item.test_binary == base64.b64encode(b"\xc3\x28").decode()
+
+# note that the underlying value is still bytes and will be saved as such
+assert item.__dict__["test_binary"] == b"\xc3\x28"
+```
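Editor's note, not part of the committed docs: a small follow-up sketch reusing the `LargeBinaryStr` model from the example above. Per the changelog entry in this changeset, the base64 string is also what `dict()` returns, so the model can be serialized to json directly.

```python
import base64

# assumes the LargeBinaryStr model defined in the example above
item = LargeBinaryStr(test_binary=base64.b64encode(b"\xc3\x28").decode())

# dict() returns the base64 encoded string instead of raw bytes,
# so the payload survives json serialization
payload = item.dict()
assert payload["test_binary"] == base64.b64encode(b"\xc3\x28").decode()
```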

 ### UUID

 `UUID(uuid_format: str = 'hex')` has no required parameters.
@ -2,8 +2,11 @@

 ## ✨ Features

-* Add `exclude_primary_keys` flag to `dict()` method that allows to exclude all primary key columns in the resulting dictionaru. [#164](https://github.com/collerek/ormar/issues/164)
-* Add `exclude_through_models` flag to `dict()` that allows excluding all through models from `ManyToMany` relations [#164](https://github.com/collerek/ormar/issues/164)
+* Add `exclude_primary_keys: bool = False` flag to the `dict()` method that allows excluding all primary key columns from the resulting dictionary (see the example after this list). [#164](https://github.com/collerek/ormar/issues/164)
+* Add `exclude_through_models: bool = False` flag to `dict()` that allows excluding all through models from `ManyToMany` relations. [#164](https://github.com/collerek/ormar/issues/164)
+* Add `represent_as_base64_str: bool = False` parameter that allows conversion of a bytes `LargeBinary` field to a base64 encoded string. The string is returned in `dict()`
+  and on attribute access, and a string value is converted back to bytes on setting. Data in the database is stored as bytes. [#187](https://github.com/collerek/ormar/issues/187)
+* Add `pk` alias to allow field access by `Model.pk` in filters and order by clauses (python style)
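Editor's illustrative sketch, not part of this release's code or docs: the `Course`/`Student` models, field names, and connection string below are hypothetical; only the `dict()` flags and the `pk` alias come from the entries above, and the `.desc()` / comparison accessors follow ormar's existing python-style query API.

```python
from typing import List, Optional

import databases
import ormar
import sqlalchemy

database = databases.Database("sqlite:///demo.db")  # hypothetical connection string
metadata = sqlalchemy.MetaData()


class Student(ormar.Model):
    class Meta:
        tablename = "students"
        database = database
        metadata = metadata

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Course(ormar.Model):
    class Meta:
        tablename = "courses"
        database = database
        metadata = metadata

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    students: Optional[List[Student]] = ormar.ManyToMany(Student)


async def demo() -> None:
    course = await Course.objects.select_related("students").get(name="Math")

    # new dict() flags: drop primary keys and the ManyToMany through models
    data = course.dict(exclude_primary_keys=True, exclude_through_models=True)

    # new `pk` alias, usable python-style in filters and order by clauses
    same = await Course.objects.filter(Course.pk == course.pk).get()
    newest = await Course.objects.order_by(Course.pk.desc()).all()
```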

 ## 🐛 Fixes

@ -13,6 +16,7 @@
 ## 💬 Other

 * Provide a guide and samples of `dict()` parameters in the [docs](https://collerek.github.io/ormar/models/methods/)
+* Major refactor of getting/setting attributes from magic methods into descriptors -> noticeable performance improvement

 # 0.10.6

@ -95,8 +95,9 @@ class BaseField(FieldInfo):
         self.ormar_default: Any = kwargs.pop("default", None)
         self.server_default: Any = kwargs.pop("server_default", None)

-        self.represent_as_base64_str: bool = kwargs.pop("represent_as_base64_str", False)
-        self.use_base64: bool = kwargs.pop("use_base64", False)
+        self.represent_as_base64_str: bool = kwargs.pop(
+            "represent_as_base64_str", False
+        )

         for name, value in kwargs.items():
             setattr(self, name, value)
@ -1,32 +0,0 @@
-from pydantic import BaseModel
-
-
-class OrmarBytes(bytes):
-
-    @classmethod
-    def __get_validators__(cls):
-        yield cls.validate
-
-    @classmethod
-    def validate(cls, v):
-        if not isinstance(v, str):
-            pass
-        return v
-
-    def __get__(self, obj, class_=None):
-        return 'test'
-
-    def __set__(self, obj, value):
-        obj.__dict__['test'] = value
-
-
-class ModelA(BaseModel):
-    test: OrmarBytes = OrmarBytes()
-
-
-ModelA.test = OrmarBytes()
-aa = ModelA(test=b"aa")
-print(aa.__dict__)
-print(aa.test)
-aa.test = 'aas'
-print(aa.test)
@ -435,12 +435,7 @@ class LargeBinary(ModelFieldFactory, bytes):
     _sample = "bytes"

     def __new__(  # type: ignore # noqa CFQ002
-        cls,
-        *,
-        max_length: int,
-        use_base64: bool = False,
-        represent_as_base64_str: bool = False,
-        **kwargs: Any
+        cls, *, max_length: int, represent_as_base64_str: bool = False, **kwargs: Any
     ) -> BaseField:  # type: ignore
         kwargs = {
             **kwargs,
@ -1,4 +1,17 @@
-from ormar.models.descriptors.descriptors import PkDescriptor, PropertyDescriptor, \
-    PydanticDescriptor, \
-    RelationDescriptor
-__all__ = ["PydanticDescriptor", "RelationDescriptor", "PropertyDescriptor", "PkDescriptor"]
+from ormar.models.descriptors.descriptors import (
+    BytesDescriptor,
+    JsonDescriptor,
+    PkDescriptor,
+    PropertyDescriptor,
+    PydanticDescriptor,
+    RelationDescriptor,
+)
+
+__all__ = [
+    "PydanticDescriptor",
+    "RelationDescriptor",
+    "PropertyDescriptor",
+    "PkDescriptor",
+    "JsonDescriptor",
+    "BytesDescriptor",
+]
@ -1,97 +1,143 @@
-import pydantic
+import base64
+from typing import Any, TYPE_CHECKING, Type

-from ormar.models.helpers.validation import validate_choices
+try:
+    import orjson as json
+except ImportError:  # pragma: no cover
+    import json  # type: ignore
+
+if TYPE_CHECKING:  # pragma: no cover
+    from ormar import Model


 class PydanticDescriptor:
+    """
+    Pydantic descriptor simply delegates everything to pydantic model
+    """
+
-    def __init__(self, name):
+    def __init__(self, name: str) -> None:
         self.name = name

-    def __get__(self, instance, owner):
-        value = object.__getattribute__(instance, "__dict__").get(self.name, None)
-        value = object.__getattribute__(instance, "_convert_json")(self.name, value, "loads")
-        value = object.__getattribute__(instance, "_convert_bytes")(self.name, value, "read")
+    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
+        value = instance.__dict__.get(self.name, None)
         return value

-    def __set__(self, instance, value):
-        if self.name in object.__getattribute__(instance, "_choices_fields"):
-            validate_choices(field=instance.Meta.model_fields[self.name], value=value)
-        value = object.__getattribute__(instance, '_convert_bytes')(self.name, value, op="write")
-        value = object.__getattribute__(instance, '_convert_json')(self.name, value, op="dumps")
+    def __set__(self, instance: "Model", value: Any) -> None:
         instance._internal_set(self.name, value)
-        object.__getattribute__(instance, "set_save_status")(False)
+        instance.set_save_status(False)


+class JsonDescriptor:
+    """
+    Json descriptor dumps/loads strings to actual data on write/read
+    """
+
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
+        value = instance.__dict__.get(self.name, None)
+        return value
+
+    def __set__(self, instance: "Model", value: Any) -> None:
+        if not isinstance(value, str):
+            value = json.dumps(value)
+        value = value.decode("utf-8") if isinstance(value, bytes) else value
+        instance._internal_set(self.name, value)
+        instance.set_save_status(False)
+
+
+class BytesDescriptor:
+    """
+    Bytes descriptor converts strings to bytes on write and converts bytes to str
+    if represent_as_base64_str flag is set, so the value can be dumped to json
+    """
+
+    def __init__(self, name: str) -> None:
+        self.name = name
+
+    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
+        value = instance.__dict__.get(self.name, None)
+        field = instance.Meta.model_fields[self.name]
+        if field.represent_as_base64_str and not isinstance(value, str):
+            value = base64.b64encode(value).decode()
+        return value
+
+    def __set__(self, instance: "Model", value: Any) -> None:
+        field = instance.Meta.model_fields[self.name]
+        if isinstance(value, str):
+            if field.represent_as_base64_str:
+                value = base64.b64decode(value)
+            else:
+                value = value.encode("utf-8")
+        instance._internal_set(self.name, value)
+        instance.set_save_status(False)
+
+
 class PkDescriptor:
+    """
+    As of now it's basically a copy of PydanticDescriptor but that will
+    change in the future with multi column primary keys
+    """
+
-    def __init__(self, name):
+    def __init__(self, name: str) -> None:
         self.name = name

-    def __get__(self, instance, owner):
-        value = object.__getattribute__(instance, "__dict__").get(self.name, None)
-        value = object.__getattribute__(instance, "_convert_json")(self.name, value, "loads")
-        value = object.__getattribute__(instance, "_convert_bytes")(self.name, value, "read")
+    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
+        value = instance.__dict__.get(self.name, None)
         return value

-    def __set__(self, instance, value):
-        if self.name in object.__getattribute__(instance, "_choices_fields"):
-            validate_choices(field=instance.Meta.model_fields[self.name], value=value)
-        value = object.__getattribute__(instance, '_convert_bytes')(self.name, value, op="write")
-        value = object.__getattribute__(instance, '_convert_json')(self.name, value, op="dumps")
+    def __set__(self, instance: "Model", value: Any) -> None:
         instance._internal_set(self.name, value)
-        object.__getattribute__(instance, "set_save_status")(False)
+        instance.set_save_status(False)


 class RelationDescriptor:
+    """
+    Relation descriptor expands the relation to initialize the related model
+    before setting it to __dict__. Note that expanding also registers the
+    related model in RelationManager.
+    """
+
-    def __init__(self, name):
+    def __init__(self, name: str) -> None:
         self.name = name

-    def __get__(self, instance, owner):
-        if self.name in object.__getattribute__(instance, '_orm'):
-            return object.__getattribute__(instance, '_orm').get(self.name)  # type: ignore
+    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
+        if self.name in instance._orm:
+            return instance._orm.get(self.name)  # type: ignore
         return None  # pragma no cover

-    def __set__(self, instance, value):
-        model = (
-            object.__getattribute__(instance, "Meta")
-            .model_fields[self.name]
-            .expand_relationship(value=value, child=instance)
+    def __set__(self, instance: "Model", value: Any) -> None:
+        model = instance.Meta.model_fields[self.name].expand_relationship(
+            value=value, child=instance
         )
-        if isinstance(object.__getattribute__(instance, "__dict__").get(self.name), list):
+        if isinstance(instance.__dict__.get(self.name), list):
             # virtual foreign key or many to many
-            # TODO: Fix double items in dict, no effect on real action ugly repr
-            # if model.pk not in [x.pk for x in related_list]:
-            object.__getattribute__(instance, "__dict__")[self.name].append(model)
+            # TODO: Fix double items in dict, no effect on real action just ugly repr
+            instance.__dict__[self.name].append(model)
         else:
             # foreign key relation
-            object.__getattribute__(instance, "__dict__")[self.name] = model
-        object.__getattribute__(instance, "set_save_status")(False)
+            instance.__dict__[self.name] = model
+        instance.set_save_status(False)


 class PropertyDescriptor:
+    """
+    Property descriptor handles methods decorated with @property_field decorator.
+    They are read only.
+    """
+
-    def __init__(self, name, function):
+    def __init__(self, name: str, function: Any) -> None:
         self.name = name
         self.function = function

-    def __get__(self, instance, owner):
+    def __get__(self, instance: "Model", owner: Type["Model"]) -> Any:
         if instance is None:
             return self
         if instance is not None and self.function is not None:
             bound = self.function.__get__(instance, instance.__class__)
             return bound() if callable(bound) else bound

-    def __set__(self, instance, value):
+    def __set__(self, instance: "Model", value: Any) -> None:  # pragma: no cover
+        # kept here so it's a data-descriptor and precedes __dict__ lookup
         pass
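Editor's aside, not part of the commit: for readers unfamiliar with the descriptor protocol this refactor relies on, here is a minimal, self-contained toy data descriptor (hypothetical `Base64Str`/`Blob` names) that mirrors the read/write conversion idea behind `BytesDescriptor` above.

```python
import base64


class Base64Str:
    """A toy data descriptor: stores raw bytes, exposes a base64 string."""

    def __init__(self, name: str) -> None:
        self.name = name

    def __get__(self, instance, owner):
        if instance is None:
            return self
        raw = instance.__dict__.get(self.name, b"")
        return base64.b64encode(raw).decode()

    def __set__(self, instance, value) -> None:
        # accept either raw bytes or a base64 encoded string
        instance.__dict__[self.name] = (
            base64.b64decode(value) if isinstance(value, str) else value
        )


class Blob:
    data = Base64Str("data")


blob = Blob()
blob.data = b"\xc3\x28"  # set raw bytes
assert blob.data == base64.b64encode(b"\xc3\x28").decode()

blob.data = base64.b64encode(b"\x00\xff").decode()  # or set a base64 string
assert blob.__dict__["data"] == b"\x00\xff"
```

Because the descriptor defines both `__get__` and `__set__`, it takes precedence over the instance `__dict__`, which is exactly what lets the new ormar descriptors replace the old `__getattribute__`/`__setattr__` magic.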
@ -22,9 +22,14 @@ from ormar.exceptions import ModelError
 from ormar.fields import BaseField
 from ormar.fields.foreign_key import ForeignKeyField
 from ormar.fields.many_to_many import ManyToManyField
-from ormar.models.descriptors import PkDescriptor, PropertyDescriptor, \
-    PydanticDescriptor, \
-    RelationDescriptor
+from ormar.models.descriptors import (
+    JsonDescriptor,
+    PkDescriptor,
+    PropertyDescriptor,
+    PydanticDescriptor,
+    RelationDescriptor,
+)
+from ormar.models.descriptors.descriptors import BytesDescriptor
 from ormar.models.helpers import (
     alias_manager,
     check_required_meta_parameters,
@ -481,6 +486,31 @@ def update_attrs_and_fields(
     return updated_model_fields


+def add_field_descriptor(
+    name: str, field: "BaseField", new_model: Type["Model"]
+) -> None:
+    """
+    Sets appropriate descriptor for each model field.
+    There are 5 main types of descriptors, for bytes, json, pure pydantic fields,
+    and 2 ormar ones - one for relation and one for pk shortcut
+
+    :param name: name of the field
+    :type name: str
+    :param field: model field to add descriptor for
+    :type field: BaseField
+    :param new_model: model with fields
+    :type new_model: Type["Model"]
+    """
+    if field.is_relation:
+        setattr(new_model, name, RelationDescriptor(name=name))
+    elif field.__type__ == pydantic.Json:
+        setattr(new_model, name, JsonDescriptor(name=name))
+    elif field.__type__ == bytes:
+        setattr(new_model, name, BytesDescriptor(name=name))
+    else:
+        setattr(new_model, name, PydanticDescriptor(name=name))


 class ModelMetaclass(pydantic.main.ModelMetaclass):
     def __new__(  # type: ignore # noqa: CCR001
         mcs: "ModelMetaclass", name: str, bases: Any, attrs: dict
@ -545,10 +575,7 @@ class ModelMetaclass(pydantic.main.ModelMetaclass):
             # TODO: iterate only related fields
             for name, field in new_model.Meta.model_fields.items():
                 register_relation_in_alias_manager(field=field)
-                if field.is_relation:
-                    setattr(new_model, name, RelationDescriptor(name=name))
-                else:
-                    setattr(new_model, name, PydanticDescriptor(name=name))
+                add_field_descriptor(name=name, field=field, new_model=new_model)

             if new_model.Meta.pkname not in attrs["__annotations__"]:
                 field_name = new_model.Meta.pkname
@ -561,10 +588,13 @@ class ModelMetaclass(pydantic.main.ModelMetaclass):

             for item in new_model.Meta.property_fields:
                 function = getattr(new_model, item)
-                setattr(new_model, item, PropertyDescriptor(name=item,
-                                                            function=function))
+                setattr(
+                    new_model,
+                    item,
+                    PropertyDescriptor(name=item, function=function),
+                )

-            setattr(new_model, 'pk', PkDescriptor(name=new_model.Meta.pkname))
+            new_model.pk = PkDescriptor(name=new_model.Meta.pkname)

         return new_model

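Editor's sketch, not part of the commit: the net effect of `add_field_descriptor` above is that after class creation each field name in the class namespace holds one of the descriptors from this changeset. The `Note` model, its fields, and the connection string here are hypothetical; the descriptor classes, selection rules, and import paths are the ones introduced above.

```python
import databases
import ormar
import pydantic
import sqlalchemy

from ormar.models.descriptors import JsonDescriptor, PkDescriptor, PydanticDescriptor
from ormar.models.descriptors.descriptors import BytesDescriptor

database = databases.Database("sqlite:///demo.db")  # hypothetical connection string
metadata = sqlalchemy.MetaData()


class Note(ormar.Model):
    class Meta:
        tablename = "notes"
        database = database
        metadata = metadata

    id: int = ormar.Integer(primary_key=True)
    title: str = ormar.String(max_length=50)
    payload: pydantic.Json = ormar.JSON()
    blob: bytes = ormar.LargeBinary(max_length=1000)


# the metaclass placed one descriptor per field in the class namespace,
# chosen by add_field_descriptor, plus the `pk` shortcut descriptor
assert isinstance(Note.__dict__["title"], PydanticDescriptor)
assert isinstance(Note.__dict__["payload"], JsonDescriptor)
assert isinstance(Note.__dict__["blob"], BytesDescriptor)
assert isinstance(Note.__dict__["pk"], PkDescriptor)
```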
@ -10,7 +10,6 @@ from typing import (
     Mapping,
     MutableSequence,
     Optional,
-    Sequence,
     Set,
     TYPE_CHECKING,
     Tuple,
@ -39,7 +38,6 @@ from ormar.models.helpers.sqlalchemy import (
     populate_meta_sqlalchemy_table_if_required,
     update_column_definition,
 )
-from ormar.models.helpers.validation import validate_choices
 from ormar.models.metaclass import ModelMeta, ModelMetaclass
 from ormar.models.modelproxy import ModelTableProxy
 from ormar.queryset.utils import translate_list_to_dict
@ -89,6 +87,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
     _pydantic_fields: Set
     _quick_access_fields: Set
     _json_fields: Set
+    _bytes_fields: Set
     Meta: ModelMeta

     # noinspection PyMissingConstructor
@ -157,23 +156,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass

     def __setattr__(self, name: str, value: Any) -> None:  # noqa CCR001
         """
-        Overwrites setattr in object to allow for special behaviour of certain params.
+        Overwrites setattr in pydantic parent as otherwise descriptors are not called.

-        Parameter "pk" is translated into actual primary key field name.
-
-        Relations are expanded (child model constructed if needed) and registered on
-        both ends of the relation. The related models are handled by RelationshipManager
-        exposed at _orm param.
-
-        Json fields converted if needed.
-
-        Setting pk, foreign key value or any other field value sets Model save status
-        to False. Setting a reverse relation or many to many relation does not as it
-        does not modify the state of the model (but related model or through model).
-
-        To short circuit all checks and expansions the set of attribute names present
-        on each model is gathered into _quick_access_fields that is looked first and
-        if field is in this set the object setattr is called directly.
-
         :param name: name of the attribute to set
         :type name: str
@ -187,89 +170,30 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
         else:
             # let pydantic handle errors for unknown fields
             super().__setattr__(name, value)
-        # if name in object.__getattribute__(self, "_quick_access_fields"):
-        #     object.__setattr__(self, name, value)
-        # elif name == "pk":
-        #     object.__setattr__(self, self.Meta.pkname, value)
-        #     object.__getattribute__(self, "set_save_status")(False)
-        # elif name in object.__getattribute__(self, "_orm"):
-        #     model = (
-        #         object.__getattribute__(self, "Meta")
-        #         .model_fields[name]
-        #         .expand_relationship(value=value, child=self)
-        #     )
-        #     if isinstance(object.__getattribute__(self, "__dict__").get(name), list):
-        #         # virtual foreign key or many to many
-        #         # TODO: Fix double items in dict, no effect on real action ugly repr
-        #         # if model.pk not in [x.pk for x in related_list]:
-        #         object.__getattribute__(self, "__dict__")[name].append(model)
-        #     else:
-        #         # foreign key relation
-        #         object.__getattribute__(self, "__dict__")[name] = model
-        #         object.__getattribute__(self, "set_save_status")(False)
-        # else:
-        #     if name in object.__getattribute__(self, "_choices_fields"):
-        #         validate_choices(field=self.Meta.model_fields[name], value=value)
-        #     value = object.__getattribute__(self, '_convert_bytes')(name, value, op="write")
-        #     value = object.__getattribute__(self, '_convert_json')(name, value, op="dumps")
-        #     super().__setattr__(name, value)
-        #     object.__getattribute__(self, "set_save_status")(False)

-    def _internal_set(self, name, value):
+    def __getattr__(self, item: str) -> Any:
+        """
+        Used only to silence mypy errors for Through models and reverse relations.
+        Not used in real life as in practice calls are intercepted
+        by RelationDescriptors
+
+        :param item: name of attribute
+        :type item: str
+        :return: Any
+        :rtype: Any
+        """
+        return super().__getattribute__(item)
+
+    def _internal_set(self, name: str, value: Any) -> None:
+        """
+        Delegates call to pydantic.
+
+        :param name: name of param
+        :type name: str
+        :param value: value to set
+        :type value: Any
+        """
         super().__setattr__(name, value)

-    # def __getattribute__(self, item: str) -> Any:  # noqa: CCR001
-    #     """
-    #     Because we need to overwrite getting the attribute by ormar instead of pydantic
-    #     as well as returning related models and not the value stored on the model the
-    #     __getattribute__ needs to be used not __getattr__.
-    #
-    #     It's used to access all attributes so it can be a big overhead that's why a
-    #     number of short circuits is used.
-    #
-    #     To short circuit all checks and expansions the set of attribute names present
-    #     on each model is gathered into _quick_access_fields that is looked first and
-    #     if field is in this set the object setattr is called directly.
-    #
-    #     To avoid recursion object's getattribute is used to actually get the attribute
-    #     value from the model after the checks.
-    #
-    #     Even the function calls are constructed with objects functions.
-    #
-    #     Parameter "pk" is translated into actual primary key field name.
-    #
-    #     Relations are returned so the actual related model is returned and not current
-    #     model's field. The related models are handled by RelationshipManager exposed
-    #     at _orm param.
-    #
-    #     Json fields are converted if needed.
-    #
-    #     :param item: name of the attribute to retrieve
-    #     :type item: str
-    #     :return: value of the attribute
-    #     :rtype: Any
-    #     """
-    #     if item in object.__getattribute__(self, "_quick_access_fields"):
-    #         return object.__getattribute__(self, item)
-    #     # if item == "pk":
-    #     #     return object.__getattribute__(self, "__dict__").get(self.Meta.pkname, None)
-    #     # if item in object.__getattribute__(self, "extract_related_names")():
-    #     #     return object.__getattribute__(
-    #     #         self, "_extract_related_model_instead_of_field"
-    #     #     )(item)
-    #     # if item in object.__getattribute__(self, "extract_through_names")():
-    #     #     return object.__getattribute__(
-    #     #         self, "_extract_related_model_instead_of_field"
-    #     #     )(item)
-    #     # if item in object.__getattribute__(self, "Meta").property_fields:
-    #     #     value = object.__getattribute__(self, item)
-    #     #     return value() if callable(value) else value
-    #     # if item in object.__getattribute__(self, "_pydantic_fields"):
-    #     #     value = object.__getattribute__(self, "__dict__").get(item, None)
-    #     #     value = object.__getattribute__(self, "_convert_json")(item, value, "loads")
-    #     #     value = object.__getattribute__(self, "_convert_bytes")(item, value, "read")
-    #     #     return value
-    #
-    #     return object.__getattribute__(self, item)  # pragma: no cover

     def _verify_model_can_be_initialized(self) -> None:
         """
@ -278,9 +202,9 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
         :return: None
         :rtype: None
         """
-        if object.__getattribute__(self, "Meta").abstract:
+        if self.Meta.abstract:
             raise ModelError(f"You cannot initialize abstract model {self.get_name()}")
-        if object.__getattribute__(self, "Meta").requires_ref_update:
+        if self.Meta.requires_ref_update:
             raise ModelError(
                 f"Model {self.get_name()} has not updated "
                 f"ForwardRefs. \nBefore using the model you "
@ -304,11 +228,9 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
         :return: modified kwargs
         :rtype: Tuple[Dict, Dict]
         """
-        meta = object.__getattribute__(self, "Meta")
-        property_fields = meta.property_fields
-        model_fields = meta.model_fields
-        pydantic_fields = object.__getattribute__(self, "__fields__")
-        bytes_fields = object.__getattribute__(self, '_bytes_fields')
+        property_fields = self.Meta.property_fields
+        model_fields = self.Meta.model_fields
+        pydantic_fields = set(self.__fields__.keys())

         # remove property fields
         for prop_filed in property_fields:
@ -316,7 +238,7 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass

         excluded: Set[str] = kwargs.pop("__excluded__", set())
         if "pk" in kwargs:
-            kwargs[meta.pkname] = kwargs.pop("pk")
+            kwargs[self.Meta.pkname] = kwargs.pop("pk")

         # extract through fields
         through_tmp_dict = dict()
@ -325,12 +247,14 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass

         try:
             new_kwargs: Dict[str, Any] = {
-                k: self._convert_json(
+                k: self._convert_to_bytes(
+                    k,
+                    self._convert_json(
                         k,
                         model_fields[k].expand_relationship(v, self, to_register=False,)
                         if k in model_fields
                         else (v if k in pydantic_fields else model_fields[k]),
-                    "dumps",
+                    ),
                 )
                 for k, v in kwargs.items()
             }
@ -362,21 +286,6 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
             ),
         )

-    def _extract_related_model_instead_of_field(
-        self, item: str
-    ) -> Optional[Union["Model", Sequence["Model"]]]:
-        """
-        Retrieves the related model/models from RelationshipManager.
-
-        :param item: name of the relation
-        :type item: str
-        :return: related model, list of related models or None
-        :rtype: Optional[Union[Model, List[Model]]]
-        """
-        if item in self._orm:
-            return self._orm.get(item)  # type: ignore
-        return None  # pragma no cover
-
     def __eq__(self, other: object) -> bool:
         """
         Compares other model to this model. when == is called.
@ -758,6 +667,11 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
             exclude_none=exclude_none,
         )

+        dict_instance = {
+            k: self._convert_bytes_to_str(column_name=k, value=v)
+            for k, v in dict_instance.items()
+        }
+
         if include and isinstance(include, Set):
             include = translate_list_to_dict(include)
         if exclude and isinstance(exclude, Set):
@ -844,40 +758,46 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
             setattr(self, key, value)
         return self

-    def _convert_bytes(self, column_name: str, value: Any, op: str) -> Union[str, Dict]:
+    def _convert_to_bytes(self, column_name: str, value: Any) -> Union[str, Dict]:
         """
-        Converts value to/from json if needed (for Json columns).
+        Converts value to bytes from string

         :param column_name: name of the field
         :type column_name: str
         :param value: value fo the field
         :type value: Any
-        :param op: operator on json
-        :type op: str
         :return: converted value if needed, else original value
         :rtype: Any
         """
-        if column_name not in object.__getattribute__(self, "_bytes_fields"):
+        if column_name not in self._bytes_fields:
             return value
         field = self.Meta.model_fields[column_name]
-        condition = (
-            isinstance(value, bytes) if op == "read" else not isinstance(value, bytes)
-        )
-        if op == "read" and condition:
-            if field.use_base64:
-                value = base64.b64encode(value)
-            elif field.represent_as_base64_str:
-                value = base64.b64encode(value).decode()
-            else:
-                value = value.decode("utf-8")
-        elif condition:
-            if field.use_base64 or field.represent_as_base64_str:
+        if not isinstance(value, bytes):
+            if field.represent_as_base64_str:
                 value = base64.b64decode(value)
             else:
                 value = value.encode("utf-8")
         return value

-    def _convert_json(self, column_name: str, value: Any, op: str) -> Union[str, Dict]:
+    def _convert_bytes_to_str(self, column_name: str, value: Any) -> Union[str, Dict]:
+        """
+        Converts value to str from bytes for represent_as_base64_str columns.
+
+        :param column_name: name of the field
+        :type column_name: str
+        :param value: value fo the field
+        :type value: Any
+        :return: converted value if needed, else original value
+        :rtype: Any
+        """
+        if column_name not in self._bytes_fields:
+            return value
+        field = self.Meta.model_fields[column_name]
+        if not isinstance(value, str) and field.represent_as_base64_str:
+            return base64.b64encode(value).decode()
+        return value
+
+    def _convert_json(self, column_name: str, value: Any) -> Union[str, Dict]:
         """
         Converts value to/from json if needed (for Json columns).

@ -885,24 +805,14 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
         :type column_name: str
         :param value: value fo the field
         :type value: Any
-        :param op: operator on json
-        :type op: str
         :return: converted value if needed, else original value
         :rtype: Any
         """
-        if column_name not in object.__getattribute__(self, "_json_fields"):
+        if column_name not in self._json_fields:
             return value
-        condition = (
-            isinstance(value, str) if op == "loads" else not isinstance(value, str)
-        )
-        operand: Callable[[Any], Any] = (
-            json.loads if op == "loads" else json.dumps  # type: ignore
-        )
-
-        if condition:
+        if not isinstance(value, str):
             try:
-                value = operand(value)
+                value = json.dumps(value)
             except TypeError:  # pragma no cover
                 pass
         return value.decode("utf-8") if isinstance(value, bytes) else value
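Editor's aside, not part of the commit: the bytes/string conversion above boils down to the standard library's base64 round trip, as in this minimal sketch.

```python
import base64

raw = b"\xc3\x28"  # bytes that are not valid utf-8

# what _convert_bytes_to_str (and the BytesDescriptor on read) does when
# represent_as_base64_str is set: bytes -> base64 string, safe for json
as_str = base64.b64encode(raw).decode()

# what _convert_to_bytes does on write: base64 string -> original bytes
assert base64.b64decode(as_str) == raw
```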
@ -76,6 +76,11 @@ renderer:
     - title: Save Prepare Mixin
       contents:
         - models.mixins.save_mixin.*
+    - title: Descriptors
+      children:
+        - title: descriptors
+          contents:
+            - models.descriptors.descriptors.*
     - title: Fields
       children:
         - title: Base Field
@ -55,7 +55,7 @@ class BinaryThing(ormar.Model):
     bt: bytes = ormar.LargeBinary(
         max_length=1000,
         choices=[blob3, blob4, blob5, blob6],
-        represent_as_base64_str=True
+        represent_as_base64_str=True,
     )

@ -84,5 +84,8 @@ def test_read_main():
     response = client.post(
         "/things", data=json.dumps({"bt": base64.b64encode(blob3).decode()})
     )
-    print(response.content)
     assert response.status_code == 200
+    response = client.get("/things")
+    assert response.json()[0]["bt"] == base64.b64encode(blob3).decode()
+    thing = BinaryThing(**response.json()[0])
+    assert thing.__dict__["bt"] == blob3
@ -55,6 +55,7 @@ def test_fields_access():
     # basic access
     assert Product.id._field == Product.Meta.model_fields["id"]
     assert Product.id.id == Product.Meta.model_fields["id"]
+    assert Product.pk.id == Product.id.id
     assert isinstance(Product.id._field, BaseField)
     assert Product.id._access_chain == "id"
     assert Product.id._source_model == Product
@ -1,4 +1,5 @@
 import asyncio
+import base64
 import datetime
 import os
 import uuid
@ -53,7 +54,7 @@ class LargeBinaryStr(ormar.Model):

     id: int = ormar.Integer(primary_key=True)
     test_binary: str = ormar.LargeBinary(
-        max_length=100000, choices=[blob3, blob4], represent_as_base64=True
+        max_length=100000, choices=[blob3, blob4], represent_as_base64_str=True
     )

@ -174,6 +175,9 @@ async def test_json_column():
     assert items[0].test_json == dict(aa=12)
     assert items[1].test_json == dict(aa=12)
+
+    items[0].test_json = "[1, 2, 3]"
+    assert items[0].test_json == [1, 2, 3]


 @pytest.mark.asyncio
 async def test_binary_column():
@ -187,6 +191,9 @@ async def test_binary_column():
     assert items[0].test_binary == blob
     assert items[1].test_binary == blob2
+
+    items[0].test_binary = "test2icac89uc98"
+    assert items[0].test_binary == b"test2icac89uc98"


 @pytest.mark.asyncio
 async def test_binary_str_column():
@ -197,8 +204,11 @@ async def test_binary_str_column():

     items = await LargeBinaryStr.objects.all()
     assert len(items) == 2
-    assert items[0].test_binary == blob3
-    assert items[1].test_binary == blob4
+    assert items[0].test_binary == base64.b64encode(blob3).decode()
+    items[0].test_binary = base64.b64encode(blob4).decode()
+    assert items[0].test_binary == base64.b64encode(blob4).decode()
+    assert items[1].test_binary == base64.b64encode(blob4).decode()
+    assert items[1].__dict__["test_binary"] == blob4


 @pytest.mark.asyncio