add excludes for pks and through models in dict

This commit is contained in:
collerek
2021-05-07 18:38:44 +02:00
parent e564acbb45
commit 70ac1e3361
10 changed files with 298 additions and 23 deletions

View File

@ -68,7 +68,16 @@ Ormar is built with:
* [`pydantic`][pydantic] for data validation. * [`pydantic`][pydantic] for data validation.
* `typing_extensions` for python 3.6 - 3.7 * `typing_extensions` for python 3.6 - 3.7
### Migrating from `sqlalchemy` ### License
`ormar` is built as open-source software and remains completely free (MIT license).
As I write open-source code to solve everyday problems in my work and to promote and build a strong python
community, you can say thank you and buy me a coffee or sponsor me with a monthly amount to help ensure my work remains free and maintained.
<iframe src="https://github.com/sponsors/collerek/button" title="Sponsor collerek" height="35" width="116" style="border: 0;"></iframe>
### Migrating from `sqlalchemy` and existing databases
If you currently use `sqlalchemy` and would like to switch to `ormar` check out the auto-translation If you currently use `sqlalchemy` and would like to switch to `ormar` check out the auto-translation
tool that can help you with translating existing sqlalchemy orm models so you do not have to do it manually. tool that can help you with translating existing sqlalchemy orm models so you do not have to do it manually.
@ -76,6 +85,8 @@ tool that can help you with translating existing sqlalchemy orm models so you do
**Beta** versions available at github: [`sqlalchemy-to-ormar`](https://github.com/collerek/sqlalchemy-to-ormar) **Beta** versions available at github: [`sqlalchemy-to-ormar`](https://github.com/collerek/sqlalchemy-to-ormar)
or simply `pip install sqlalchemy-to-ormar` or simply `pip install sqlalchemy-to-ormar`
`sqlalchemy-to-ormar` can be used in pair with `sqlacodegen` to auto-map/generate `ormar` models from an existing database, even if you don't use `sqlalchemy` for your project.
### Migrations & Database creation ### Migrations & Database creation
Because ormar is built on SQLAlchemy core, you can use [`alembic`][alembic] to provide Because ormar is built on SQLAlchemy core, you can use [`alembic`][alembic] to provide

View File

@ -68,7 +68,16 @@ Ormar is built with:
* [`pydantic`][pydantic] for data validation. * [`pydantic`][pydantic] for data validation.
* `typing_extensions` for python 3.6 - 3.7 * `typing_extensions` for python 3.6 - 3.7
### Migrating from `sqlalchemy` ### License
`ormar` is built as open-source software and remains completely free (MIT license).
As I write open-source code to solve everyday problems in my work and to promote and build a strong python
community, you can say thank you and buy me a coffee or sponsor me with a monthly amount to help ensure my work remains free and maintained.
<iframe src="https://github.com/sponsors/collerek/button" title="Sponsor collerek" height="35" width="116" style="border: 0;"></iframe>
### Migrating from `sqlalchemy` and existing databases
If you currently use `sqlalchemy` and would like to switch to `ormar` check out the auto-translation If you currently use `sqlalchemy` and would like to switch to `ormar` check out the auto-translation
tool that can help you with translating existing sqlalchemy orm models so you do not have to do it manually. tool that can help you with translating existing sqlalchemy orm models so you do not have to do it manually.
@ -76,6 +85,8 @@ tool that can help you with translating existing sqlalchemy orm models so you do
**Beta** versions available at github: [`sqlalchemy-to-ormar`](https://github.com/collerek/sqlalchemy-to-ormar) **Beta** versions available at github: [`sqlalchemy-to-ormar`](https://github.com/collerek/sqlalchemy-to-ormar)
or simply `pip install sqlalchemy-to-ormar` or simply `pip install sqlalchemy-to-ormar`
`sqlalchemy-to-ormar` can be used in pair with `sqlacodegen` to auto-map/generate `ormar` models from an existing database, even if you don't use `sqlalchemy` for your project.
### Migrations & Database creation ### Migrations & Database creation
Because ormar is built on SQLAlchemy core, you can use [`alembic`][alembic] to provide Because ormar is built on SQLAlchemy core, you can use [`alembic`][alembic] to provide

View File

@ -1,3 +1,15 @@
# 0.10.7
## ✨ Features
* Add `exclude_primary_keys` flag to `dict()` method that allows to exclude all primary key columns in the resulting dictionary.
* Add `exclude_through_models` flag to `dict()` that allows excluding all through models from `ManyToMany` relations.
## 🐛 Fixes
* Remove default `None` option for `max_length` for `LargeBinary` field
* Remove default `None` option for `max_length` for `String` field
# 0.10.6 # 0.10.6
## ✨ Features ## ✨ Features

View File

@ -136,10 +136,10 @@ class String(ModelFieldFactory, str):
def __new__( # type: ignore # noqa CFQ002 def __new__( # type: ignore # noqa CFQ002
cls, cls,
*, *,
max_length: int,
allow_blank: bool = True, allow_blank: bool = True,
strip_whitespace: bool = False, strip_whitespace: bool = False,
min_length: int = None, min_length: int = None,
max_length: int = None,
curtail_length: int = None, curtail_length: int = None,
regex: str = None, regex: str = None,
**kwargs: Any **kwargs: Any
@ -176,7 +176,7 @@ class String(ModelFieldFactory, str):
:type kwargs: Any :type kwargs: Any
""" """
max_length = kwargs.get("max_length", None) max_length = kwargs.get("max_length", None)
if max_length is None or max_length <= 0: if max_length <= 0:
raise ModelDefinitionError( raise ModelDefinitionError(
"Parameter max_length is required for field String" "Parameter max_length is required for field String"
) )
@ -435,7 +435,7 @@ class LargeBinary(ModelFieldFactory, bytes):
_sample = "bytes" _sample = "bytes"
def __new__( # type: ignore # noqa CFQ002 def __new__( # type: ignore # noqa CFQ002
cls, *, max_length: int = None, **kwargs: Any cls, *, max_length: int, **kwargs: Any
) -> BaseField: # type: ignore ) -> BaseField: # type: ignore
kwargs = { kwargs = {
**kwargs, **kwargs,
@ -468,7 +468,7 @@ class LargeBinary(ModelFieldFactory, bytes):
:type kwargs: Any :type kwargs: Any
""" """
max_length = kwargs.get("max_length", None) max_length = kwargs.get("max_length", None)
if max_length is None or max_length <= 0: if max_length <= 0:
raise ModelDefinitionError( raise ModelDefinitionError(
"Parameter max_length is required for field LargeBinary" "Parameter max_length is required for field LargeBinary"
) )

View File

@ -151,7 +151,7 @@ class ExcludableMixin(RelationMixin):
:return: set or dict with excluded fields added. :return: set or dict with excluded fields added.
:rtype: Union[Set, Dict] :rtype: Union[Set, Dict]
""" """
exclude = exclude or {} exclude = exclude or set()
related_set = cls.extract_related_names() related_set = cls.extract_related_names()
if isinstance(exclude, set): if isinstance(exclude, set):
exclude = {s for s in exclude} exclude = {s for s in exclude}
@ -162,6 +162,26 @@ class ExcludableMixin(RelationMixin):
exclude = exclude.union(related_set) exclude = exclude.union(related_set)
return exclude return exclude
@classmethod
def _update_excluded_with_pks_and_through(
cls, exclude: Set, exclude_primary_keys: bool, exclude_through_models: bool
) -> Set:
"""
Updates excluded names with name of pk column if exclude flag is set.
:param exclude: set of names to exclude
:type exclude: Set
:param exclude_primary_keys: flag if the primary keys should be excluded
:type exclude_primary_keys: bool
:return: set updated with pk if flag is set
:rtype: Set
"""
if exclude_primary_keys:
exclude.add(cls.Meta.pkname)
if exclude_through_models:
exclude = exclude.union(cls.extract_through_names())
return exclude
@classmethod @classmethod
def get_names_to_exclude(cls, excludable: ExcludableItems, alias: str) -> Set: def get_names_to_exclude(cls, excludable: ExcludableItems, alias: str) -> Set:
""" """

View File

@ -562,6 +562,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
models: MutableSequence, models: MutableSequence,
include: Union[Set, Dict, None], include: Union[Set, Dict, None],
exclude: Union[Set, Dict, None], exclude: Union[Set, Dict, None],
exclude_primary_keys: bool,
exclude_through_models: bool,
) -> List: ) -> List:
""" """
Converts list of models into list of dictionaries. Converts list of models into list of dictionaries.
@ -580,7 +582,11 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
try: try:
result.append( result.append(
model.dict( model.dict(
relation_map=relation_map, include=include, exclude=exclude, relation_map=relation_map,
include=include,
exclude=exclude,
exclude_primary_keys=exclude_primary_keys,
exclude_through_models=exclude_through_models,
) )
) )
except ReferenceError: # pragma no cover except ReferenceError: # pragma no cover
@ -623,6 +629,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
dict_instance: Dict, dict_instance: Dict,
include: Optional[Dict], include: Optional[Dict],
exclude: Optional[Dict], exclude: Optional[Dict],
exclude_primary_keys: bool,
exclude_through_models: bool,
) -> Dict: ) -> Dict:
""" """
Traverse nested models and converts them into dictionaries. Traverse nested models and converts them into dictionaries.
@ -655,6 +663,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
models=nested_model, models=nested_model,
include=self._convert_all(self._skip_ellipsis(include, field)), include=self._convert_all(self._skip_ellipsis(include, field)),
exclude=self._convert_all(self._skip_ellipsis(exclude, field)), exclude=self._convert_all(self._skip_ellipsis(exclude, field)),
exclude_primary_keys=exclude_primary_keys,
exclude_through_models=exclude_through_models,
) )
elif nested_model is not None: elif nested_model is not None:
@ -664,6 +674,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
), ),
include=self._convert_all(self._skip_ellipsis(include, field)), include=self._convert_all(self._skip_ellipsis(include, field)),
exclude=self._convert_all(self._skip_ellipsis(exclude, field)), exclude=self._convert_all(self._skip_ellipsis(exclude, field)),
exclude_primary_keys=exclude_primary_keys,
exclude_through_models=exclude_through_models,
) )
else: else:
dict_instance[field] = None dict_instance[field] = None
@ -681,6 +693,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
exclude_unset: bool = False, exclude_unset: bool = False,
exclude_defaults: bool = False, exclude_defaults: bool = False,
exclude_none: bool = False, exclude_none: bool = False,
exclude_primary_keys: bool = False,
exclude_through_models: bool = False,
relation_map: Dict = None, relation_map: Dict = None,
) -> "DictStrAny": # noqa: A003' ) -> "DictStrAny": # noqa: A003'
""" """
@ -692,6 +706,10 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
Additionally fields decorated with @property_field are also added. Additionally fields decorated with @property_field are also added.
:param exclude_through_models: flag to exclude through models from dict
:type exclude_through_models: bool
:param exclude_primary_keys: flag to exclude primary keys from dict
:type exclude_primary_keys: bool
:param include: fields to include :param include: fields to include
:type include: Union[Set, Dict, None] :type include: Union[Set, Dict, None]
:param exclude: fields to exclude :param exclude: fields to exclude
@ -711,9 +729,15 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
:return: :return:
:rtype: :rtype:
""" """
pydantic_exclude = self._update_excluded_with_related(exclude)
pydantic_exclude = self._update_excluded_with_pks_and_through(
exclude=pydantic_exclude,
exclude_primary_keys=exclude_primary_keys,
exclude_through_models=exclude_through_models,
)
dict_instance = super().dict( dict_instance = super().dict(
include=include, include=include,
exclude=self._update_excluded_with_related(exclude), exclude=pydantic_exclude,
by_alias=by_alias, by_alias=by_alias,
skip_defaults=skip_defaults, skip_defaults=skip_defaults,
exclude_unset=exclude_unset, exclude_unset=exclude_unset,
@ -738,6 +762,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
dict_instance=dict_instance, dict_instance=dict_instance,
include=include, # type: ignore include=include, # type: ignore
exclude=exclude, # type: ignore exclude=exclude, # type: ignore
exclude_primary_keys=exclude_primary_keys,
exclude_through_models=exclude_through_models,
) )
# include model properties as fields in dict # include model properties as fields in dict

View File

@ -1,4 +1,4 @@
from typing import Optional from typing import List, Optional
import databases import databases
import pytest import pytest
@ -11,16 +11,28 @@ metadata = sqlalchemy.MetaData()
database = databases.Database(DATABASE_URL, force_rollback=True) database = databases.Database(DATABASE_URL, force_rollback=True)
class MainMeta(ormar.ModelMeta):
metadata = metadata
database = database
class Role(ormar.Model):
class Meta(MainMeta):
pass
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=255, nullable=False)
class User(ormar.Model): class User(ormar.Model):
class Meta: class Meta(MainMeta):
tablename: str = "users" tablename: str = "users"
metadata = metadata
database = database
id: int = ormar.Integer(primary_key=True) id: int = ormar.Integer(primary_key=True)
email: str = ormar.String(max_length=255, nullable=False) email: str = ormar.String(max_length=255, nullable=False)
password: str = ormar.String(max_length=255, nullable=True) password: str = ormar.String(max_length=255, nullable=True)
first_name: str = ormar.String(max_length=255, nullable=False) first_name: str = ormar.String(max_length=255, nullable=False)
roles: List[Role] = ormar.ManyToMany(Role)
class Tier(ormar.Model): class Tier(ormar.Model):
@ -58,12 +70,20 @@ class Item(ormar.Model):
@pytest.fixture(autouse=True, scope="module") @pytest.fixture(autouse=True, scope="module")
def sample_data(): def sample_data():
user = User(email="test@test.com", password="ijacids7^*&", first_name="Anna") role = Role(name="User", id=1)
tier = Tier(name="Tier I") role2 = Role(name="Admin", id=2)
category1 = Category(name="Toys", tier=tier) user = User(
category2 = Category(name="Weapons", tier=tier) id=1,
item1 = Item(name="Teddy Bear", category=category1, created_by=user) email="test@test.com",
item2 = Item(name="M16", category=category2, created_by=user) password="ijacids7^*&",
first_name="Anna",
roles=[role, role2],
)
tier = Tier(id=1, name="Tier I")
category1 = Category(id=1, name="Toys", tier=tier)
category2 = Category(id=2, name="Weapons", tier=tier)
item1 = Item(id=1, name="Teddy Bear", category=category1, created_by=user)
item2 = Item(id=2, name="M16", category=category2, created_by=user)
return item1, item2 return item1, item2
@ -139,4 +159,30 @@ def test_dumping_to_dict_exclude_and_include_nested_dict(sample_data):
assert dict2["category"]["name"] == "Toys" assert dict2["category"]["name"] == "Toys"
assert "created_by" not in dict1 assert "created_by" not in dict1
assert dict1["category"]["tier"].get("name") is None assert dict1["category"]["tier"].get("name") is None
assert dict1["category"]["tier"]["id"] is None assert dict1["category"]["tier"]["id"] == 1
def test_dumping_dict_without_primary_keys(sample_data):
item1, item2 = sample_data
dict1 = item2.dict(exclude_primary_keys=True)
assert dict1 == {
"category": {"name": "Weapons", "tier": {"name": "Tier I"}},
"created_by": {
"email": "test@test.com",
"first_name": "Anna",
"password": "ijacids7^*&",
"roles": [{"name": "User"}, {"name": "Admin"}],
},
"name": "M16",
}
dict2 = item1.dict(exclude_primary_keys=True)
assert dict2 == {
"category": {"name": "Toys", "tier": {"name": "Tier I"}},
"created_by": {
"email": "test@test.com",
"first_name": "Anna",
"password": "ijacids7^*&",
"roles": [{"name": "User"}, {"name": "Admin"}],
},
"name": "Teddy Bear",
}

View File

@ -0,0 +1,149 @@
from typing import List
import databases
import pytest
import sqlalchemy
from fastapi import FastAPI
from starlette.testclient import TestClient
import ormar
from tests.settings import DATABASE_URL
app = FastAPI()
metadata = sqlalchemy.MetaData()
database = databases.Database(DATABASE_URL, force_rollback=True)
app.state.database = database
@app.on_event("startup")
async def startup() -> None:
database_ = app.state.database
if not database_.is_connected:
await database_.connect()
@app.on_event("shutdown")
async def shutdown() -> None:
database_ = app.state.database
if database_.is_connected:
await database_.disconnect()
class Category(ormar.Model):
class Meta:
tablename = "categories"
metadata = metadata
database = database
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
class Item(ormar.Model):
class Meta:
tablename = "items"
metadata = metadata
database = database
id: int = ormar.Integer(primary_key=True)
name: str = ormar.String(max_length=100)
categories: List[Category] = ormar.ManyToMany(Category)
@pytest.fixture(autouse=True, scope="module")
def create_test_database():
engine = sqlalchemy.create_engine(DATABASE_URL)
metadata.create_all(engine)
yield
metadata.drop_all(engine)
@app.post("/items/", response_model=Item)
async def create_item(item: Item):
await item.save_related(follow=True, save_all=True)
return item
@app.get("/items/{item_id}")
async def get_item(item_id: int):
item = await Item.objects.select_related("categories").get(pk=item_id)
return item.dict(exclude_primary_keys=True, exclude_through_models=True)
@app.get("/categories/{category_id}")
async def get_category(category_id: int):
category = await Category.objects.select_related("items").get(pk=category_id)
return category.dict(exclude_primary_keys=True)
@app.get("/categories/nt/{category_id}")
async def get_category_no_through(category_id: int):
category = await Category.objects.select_related("items").get(pk=category_id)
return category.dict(exclude_through_models=True)
@app.get("/categories/ntp/{category_id}")
async def get_category_no_pk_through(category_id: int):
category = await Category.objects.select_related("items").get(pk=category_id)
return category.dict(exclude_through_models=True, exclude_primary_keys=True)
@app.get(
"/items/fex/{item_id}",
response_model=Item,
response_model_exclude={
"id",
"categories__id",
"categories__itemcategory",
"categories__items",
},
)
async def get_item_excl(item_id: int):
item = await Item.objects.select_all().get(pk=item_id)
return item
def test_all_endpoints():
client = TestClient(app)
with client as client:
item = {
"name": "test",
"categories": [{"name": "test cat"}, {"name": "test cat2"}],
}
response = client.post("/items/", json=item)
item_check = Item(**response.json())
assert item_check.id is not None
assert item_check.categories[0].id is not None
no_pk_item = client.get(f"/items/{item_check.id}", json=item).json()
assert no_pk_item == item
no_pk_item2 = client.get(f"/items/fex/{item_check.id}", json=item).json()
assert no_pk_item2 == item
no_pk_category = client.get(
f"/categories/{item_check.categories[0].id}", json=item
).json()
assert no_pk_category == {
"items": [
{
"itemcategory": {"category": None, "id": 1, "item": None},
"name": "test",
}
],
"name": "test cat",
}
no_through_category = client.get(
f"/categories/nt/{item_check.categories[0].id}", json=item
).json()
assert no_through_category == {
"id": 1,
"items": [{"id": 1, "name": "test"}],
"name": "test cat",
}
no_through_category = client.get(
f"/categories/ntp/{item_check.categories[0].id}", json=item
).json()
assert no_through_category == {"items": [{"name": "test"}], "name": "test cat"}

View File

@ -228,7 +228,7 @@ def test_binary_error_without_length_model_definition():
database = database database = database
metadata = metadata metadata = metadata
test: bytes = ormar.LargeBinary(primary_key=True) test: bytes = ormar.LargeBinary(primary_key=True, max_length=-1)
@typing.no_type_check @typing.no_type_check
@ -241,7 +241,7 @@ def test_string_error_in_model_definition():
database = database database = database
metadata = metadata metadata = metadata
test: str = ormar.String(primary_key=True) test: str = ormar.String(primary_key=True, max_length=0)
@typing.no_type_check @typing.no_type_check