add excludes for pks and through models in dict

README.md
@@ -68,7 +68,16 @@ Ormar is built with:
* [`pydantic`][pydantic] for data validation.
* `typing_extensions` for python 3.6 - 3.7

### Migrating from `sqlalchemy`
### License

`ormar` is built as open-source software and remains completely free (MIT license).

As I write open-source code to solve everyday problems in my work or to promote and build a strong python
community, you can say thank you and buy me a coffee or sponsor me with a monthly amount to help me ensure my work remains free and maintained.

<iframe src="https://github.com/sponsors/collerek/button" title="Sponsor collerek" height="35" width="116" style="border: 0;"></iframe>

### Migrating from `sqlalchemy` and existing databases

If you currently use `sqlalchemy` and would like to switch to `ormar`, check out the auto-translation
tool that can help you with translating existing sqlalchemy orm models so you do not have to do it manually.
@@ -76,6 +85,8 @@ tool that can help you with translating existing sqlalchemy orm models so you do
**Beta** versions available at github: [`sqlalchemy-to-ormar`](https://github.com/collerek/sqlalchemy-to-ormar)
or simply `pip install sqlalchemy-to-ormar`

`sqlalchemy-to-ormar` can be used together with `sqlacodegen` to auto-map / generate `ormar` models from an existing database, even if you don't use `sqlalchemy` for your project.

### Migrations & Database creation

Because ormar is built on SQLAlchemy core, you can use [`alembic`][alembic] to provide
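A minimal wiring sketch for that alembic setup (a hypothetical `env.py` fragment; `alembic init migrations` is assumed to have been run already, and `app.models` is an assumed module name for wherever the ormar models and their shared `sqlalchemy.MetaData` live):

```python
# migrations/env.py (fragment) - sketch only, module and path names are assumptions
from app.models import metadata  # the same MetaData object passed to the ormar models

# alembic's autogenerate compares this metadata against the live database schema
target_metadata = metadata
```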
@@ -1,3 +1,15 @@
# 0.10.7

## ✨ Features

* Add `exclude_primary_keys` flag to `dict()` method that allows excluding all primary key columns in the resulting dictionary (see the usage sketch below).
* Add `exclude_through_models` flag to `dict()` that allows excluding all through models from `ManyToMany` relations

## 🐛 Fixes

* Remove default `None` option for `max_length` for `LargeBinary` field
* Remove default `None` option for `max_length` for `String` field

# 0.10.6

## ✨ Features

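The two new flags are plain keyword arguments of `Model.dict()` and can be combined; a minimal usage sketch (the `User`/`Role` ManyToMany models are the ones defined in the test module further down, and the call is assumed to run inside an async context):

```python
user = await User.objects.select_related("roles").get(pk=1)

user.dict()                              # keeps pks and the m2m through models
user.dict(exclude_primary_keys=True)     # drops every pk column, also in nested models
user.dict(exclude_through_models=True)   # drops the auto-generated through models
user.dict(                               # both flags combined
    exclude_primary_keys=True,
    exclude_through_models=True,
)
```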
@@ -136,10 +136,10 @@ class String(ModelFieldFactory, str):
    def __new__(  # type: ignore # noqa CFQ002
        cls,
        *,
        max_length: int,
        allow_blank: bool = True,
        strip_whitespace: bool = False,
        min_length: int = None,
        max_length: int = None,
        curtail_length: int = None,
        regex: str = None,
        **kwargs: Any
@@ -176,7 +176,7 @@ class String(ModelFieldFactory, str):
        :type kwargs: Any
        """
        max_length = kwargs.get("max_length", None)
        if max_length is None or max_length <= 0:
        if max_length <= 0:
            raise ModelDefinitionError(
                "Parameter max_length is required for field String"
            )
@@ -435,7 +435,7 @@ class LargeBinary(ModelFieldFactory, bytes):
    _sample = "bytes"

    def __new__(  # type: ignore # noqa CFQ002
        cls, *, max_length: int = None, **kwargs: Any
        cls, *, max_length: int, **kwargs: Any
    ) -> BaseField:  # type: ignore
        kwargs = {
            **kwargs,
@@ -468,7 +468,7 @@ class LargeBinary(ModelFieldFactory, bytes):
        :type kwargs: Any
        """
        max_length = kwargs.get("max_length", None)
        if max_length is None or max_length <= 0:
        if max_length <= 0:
            raise ModelDefinitionError(
                "Parameter max_length is required for field LargeBinary"
            )

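In practice the change above means `max_length` now has to be passed explicitly to both field factories; a short sketch (field names are illustrative only):

```python
# max_length must now be passed explicitly to both factories
title: str = ormar.String(max_length=255)
payload: bytes = ormar.LargeBinary(max_length=1000)

# A non-positive max_length is rejected at definition time
ormar.String(max_length=0)        # raises ModelDefinitionError
ormar.LargeBinary(max_length=-1)  # raises ModelDefinitionError
```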
@@ -151,7 +151,7 @@ class ExcludableMixin(RelationMixin):
        :return: set or dict with excluded fields added.
        :rtype: Union[Set, Dict]
        """
        exclude = exclude or {}
        exclude = exclude or set()
        related_set = cls.extract_related_names()
        if isinstance(exclude, set):
            exclude = {s for s in exclude}
@@ -162,6 +162,26 @@ class ExcludableMixin(RelationMixin):
            exclude = exclude.union(related_set)
        return exclude

    @classmethod
    def _update_excluded_with_pks_and_through(
        cls, exclude: Set, exclude_primary_keys: bool, exclude_through_models: bool
    ) -> Set:
        """
        Updates excluded names with the name of the pk column and/or the names of
        through models if the respective flags are set.

        :param exclude: set of names to exclude
        :type exclude: Set
        :param exclude_primary_keys: flag if the primary keys should be excluded
        :type exclude_primary_keys: bool
        :param exclude_through_models: flag if the through models should be excluded
        :type exclude_through_models: bool
        :return: set updated with pk and through model names if flags are set
        :rtype: Set
        """
        if exclude_primary_keys:
            exclude.add(cls.Meta.pkname)
        if exclude_through_models:
            exclude = exclude.union(cls.extract_through_names())
        return exclude

    @classmethod
    def get_names_to_exclude(cls, excludable: ExcludableItems, alias: str) -> Set:
        """

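For orientation, a sketch of what the new helper contributes to an exclusion set (the `User` model is the one from the test module below; calling the private classmethod directly is purely for illustration):

```python
exclude = {"password"}
exclude = User._update_excluded_with_pks_and_through(
    exclude=exclude,
    exclude_primary_keys=True,
    exclude_through_models=True,
)
# exclude now also contains User.Meta.pkname ("id") and every name
# returned by User.extract_through_names() for its ManyToMany relations
```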
@@ -562,6 +562,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
        models: MutableSequence,
        include: Union[Set, Dict, None],
        exclude: Union[Set, Dict, None],
        exclude_primary_keys: bool,
        exclude_through_models: bool,
    ) -> List:
        """
        Converts list of models into list of dictionaries.
@@ -580,7 +582,11 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
            try:
                result.append(
                    model.dict(
                        relation_map=relation_map, include=include, exclude=exclude,
                        relation_map=relation_map,
                        include=include,
                        exclude=exclude,
                        exclude_primary_keys=exclude_primary_keys,
                        exclude_through_models=exclude_through_models,
                    )
                )
            except ReferenceError:  # pragma no cover
@@ -623,6 +629,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
        dict_instance: Dict,
        include: Optional[Dict],
        exclude: Optional[Dict],
        exclude_primary_keys: bool,
        exclude_through_models: bool,
    ) -> Dict:
        """
        Traverses nested models and converts them into dictionaries.
@@ -655,6 +663,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
                    models=nested_model,
                    include=self._convert_all(self._skip_ellipsis(include, field)),
                    exclude=self._convert_all(self._skip_ellipsis(exclude, field)),
                    exclude_primary_keys=exclude_primary_keys,
                    exclude_through_models=exclude_through_models,
                )
            elif nested_model is not None:

@@ -664,6 +674,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
                    ),
                    include=self._convert_all(self._skip_ellipsis(include, field)),
                    exclude=self._convert_all(self._skip_ellipsis(exclude, field)),
                    exclude_primary_keys=exclude_primary_keys,
                    exclude_through_models=exclude_through_models,
                )
            else:
                dict_instance[field] = None
@@ -681,6 +693,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
        exclude_unset: bool = False,
        exclude_defaults: bool = False,
        exclude_none: bool = False,
        exclude_primary_keys: bool = False,
        exclude_through_models: bool = False,
        relation_map: Dict = None,
    ) -> "DictStrAny":  # noqa: A003'
        """
@@ -692,6 +706,10 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass

        Additionally fields decorated with @property_field are also added.

        :param exclude_through_models: flag to exclude through models from dict
        :type exclude_through_models: bool
        :param exclude_primary_keys: flag to exclude primary keys from dict
        :type exclude_primary_keys: bool
        :param include: fields to include
        :type include: Union[Set, Dict, None]
        :param exclude: fields to exclude
@@ -711,9 +729,15 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
        :return:
        :rtype:
        """
        pydantic_exclude = self._update_excluded_with_related(exclude)
        pydantic_exclude = self._update_excluded_with_pks_and_through(
            exclude=pydantic_exclude,
            exclude_primary_keys=exclude_primary_keys,
            exclude_through_models=exclude_through_models,
        )
        dict_instance = super().dict(
            include=include,
            exclude=self._update_excluded_with_related(exclude),
            exclude=pydantic_exclude,
            by_alias=by_alias,
            skip_defaults=skip_defaults,
            exclude_unset=exclude_unset,
@@ -738,6 +762,8 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass
            dict_instance=dict_instance,
            include=include,  # type: ignore
            exclude=exclude,  # type: ignore
            exclude_primary_keys=exclude_primary_keys,
            exclude_through_models=exclude_through_models,
        )

        # include model properties as fields in dict

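As the propagation above suggests, the flags compose with the existing `include`/`exclude` arguments of `dict()`; a small, hypothetical combination (the `Item`/`Category`/`Tier` models are the ones from the tests below):

```python
item.dict(
    exclude={"category": {"tier"}},  # regular nested exclude, as before
    exclude_primary_keys=True,       # additionally strips every pk column
)
```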
@@ -893,7 +893,7 @@ class QuerySet(Generic[T]):
        """
        Returns all rows from a database for given model for set filter options.

        Passing args and/or kwargs is a shortcut and equals to calling
        Passing args and/or kwargs is a shortcut and equals to calling
        `filter(*args, **kwargs).all()`.

        If there are no rows meeting the criteria an empty list is returned.

@@ -1,4 +1,4 @@
from typing import Optional
from typing import List, Optional

import databases
import pytest
@@ -11,16 +11,28 @@ metadata = sqlalchemy.MetaData()
database = databases.Database(DATABASE_URL, force_rollback=True)


class MainMeta(ormar.ModelMeta):
    metadata = metadata
    database = database


class Role(ormar.Model):
    class Meta(MainMeta):
        pass

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=255, nullable=False)


class User(ormar.Model):
    class Meta:
    class Meta(MainMeta):
        tablename: str = "users"
        metadata = metadata
        database = database

    id: int = ormar.Integer(primary_key=True)
    email: str = ormar.String(max_length=255, nullable=False)
    password: str = ormar.String(max_length=255, nullable=True)
    first_name: str = ormar.String(max_length=255, nullable=False)
    roles: List[Role] = ormar.ManyToMany(Role)


class Tier(ormar.Model):
@@ -58,12 +70,20 @@ class Item(ormar.Model):

@pytest.fixture(autouse=True, scope="module")
def sample_data():
    user = User(email="test@test.com", password="ijacids7^*&", first_name="Anna")
    tier = Tier(name="Tier I")
    category1 = Category(name="Toys", tier=tier)
    category2 = Category(name="Weapons", tier=tier)
    item1 = Item(name="Teddy Bear", category=category1, created_by=user)
    item2 = Item(name="M16", category=category2, created_by=user)
    role = Role(name="User", id=1)
    role2 = Role(name="Admin", id=2)
    user = User(
        id=1,
        email="test@test.com",
        password="ijacids7^*&",
        first_name="Anna",
        roles=[role, role2],
    )
    tier = Tier(id=1, name="Tier I")
    category1 = Category(id=1, name="Toys", tier=tier)
    category2 = Category(id=2, name="Weapons", tier=tier)
    item1 = Item(id=1, name="Teddy Bear", category=category1, created_by=user)
    item2 = Item(id=2, name="M16", category=category2, created_by=user)
    return item1, item2


@@ -139,4 +159,30 @@ def test_dumping_to_dict_exclude_and_include_nested_dict(sample_data):
    assert dict2["category"]["name"] == "Toys"
    assert "created_by" not in dict1
    assert dict1["category"]["tier"].get("name") is None
    assert dict1["category"]["tier"]["id"] is None
    assert dict1["category"]["tier"]["id"] == 1


def test_dumping_dict_without_primary_keys(sample_data):
    item1, item2 = sample_data
    dict1 = item2.dict(exclude_primary_keys=True)
    assert dict1 == {
        "category": {"name": "Weapons", "tier": {"name": "Tier I"}},
        "created_by": {
            "email": "test@test.com",
            "first_name": "Anna",
            "password": "ijacids7^*&",
            "roles": [{"name": "User"}, {"name": "Admin"}],
        },
        "name": "M16",
    }
    dict2 = item1.dict(exclude_primary_keys=True)
    assert dict2 == {
        "category": {"name": "Toys", "tier": {"name": "Tier I"}},
        "created_by": {
            "email": "test@test.com",
            "first_name": "Anna",
            "password": "ijacids7^*&",
            "roles": [{"name": "User"}, {"name": "Admin"}],
        },
        "name": "Teddy Bear",
    }

tests/test_fastapi/test_excluding_fields.py (new file, 149 lines)
@@ -0,0 +1,149 @@
from typing import List

import databases
import pytest
import sqlalchemy
from fastapi import FastAPI
from starlette.testclient import TestClient

import ormar
from tests.settings import DATABASE_URL

app = FastAPI()
metadata = sqlalchemy.MetaData()
database = databases.Database(DATABASE_URL, force_rollback=True)
app.state.database = database


@app.on_event("startup")
async def startup() -> None:
    database_ = app.state.database
    if not database_.is_connected:
        await database_.connect()


@app.on_event("shutdown")
async def shutdown() -> None:
    database_ = app.state.database
    if database_.is_connected:
        await database_.disconnect()


class Category(ormar.Model):
    class Meta:
        tablename = "categories"
        metadata = metadata
        database = database

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)


class Item(ormar.Model):
    class Meta:
        tablename = "items"
        metadata = metadata
        database = database

    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=100)
    categories: List[Category] = ormar.ManyToMany(Category)


@pytest.fixture(autouse=True, scope="module")
def create_test_database():
    engine = sqlalchemy.create_engine(DATABASE_URL)
    metadata.create_all(engine)
    yield
    metadata.drop_all(engine)


@app.post("/items/", response_model=Item)
async def create_item(item: Item):
    await item.save_related(follow=True, save_all=True)
    return item


@app.get("/items/{item_id}")
async def get_item(item_id: int):
    item = await Item.objects.select_related("categories").get(pk=item_id)
    return item.dict(exclude_primary_keys=True, exclude_through_models=True)


@app.get("/categories/{category_id}")
async def get_category(category_id: int):
    category = await Category.objects.select_related("items").get(pk=category_id)
    return category.dict(exclude_primary_keys=True)


@app.get("/categories/nt/{category_id}")
async def get_category_no_through(category_id: int):
    category = await Category.objects.select_related("items").get(pk=category_id)
    return category.dict(exclude_through_models=True)


@app.get("/categories/ntp/{category_id}")
async def get_category_no_pk_through(category_id: int):
    category = await Category.objects.select_related("items").get(pk=category_id)
    return category.dict(exclude_through_models=True, exclude_primary_keys=True)


@app.get(
    "/items/fex/{item_id}",
    response_model=Item,
    response_model_exclude={
        "id",
        "categories__id",
        "categories__itemcategory",
        "categories__items",
    },
)
async def get_item_excl(item_id: int):
    item = await Item.objects.select_all().get(pk=item_id)
    return item


def test_all_endpoints():
    client = TestClient(app)
    with client as client:
        item = {
            "name": "test",
            "categories": [{"name": "test cat"}, {"name": "test cat2"}],
        }
        response = client.post("/items/", json=item)
        item_check = Item(**response.json())
        assert item_check.id is not None
        assert item_check.categories[0].id is not None

        no_pk_item = client.get(f"/items/{item_check.id}", json=item).json()
        assert no_pk_item == item

        no_pk_item2 = client.get(f"/items/fex/{item_check.id}", json=item).json()
        assert no_pk_item2 == item

        no_pk_category = client.get(
            f"/categories/{item_check.categories[0].id}", json=item
        ).json()
        assert no_pk_category == {
            "items": [
                {
                    "itemcategory": {"category": None, "id": 1, "item": None},
                    "name": "test",
                }
            ],
            "name": "test cat",
        }

        no_through_category = client.get(
            f"/categories/nt/{item_check.categories[0].id}", json=item
        ).json()
        assert no_through_category == {
            "id": 1,
            "items": [{"id": 1, "name": "test"}],
            "name": "test cat",
        }

        no_through_category = client.get(
            f"/categories/ntp/{item_check.categories[0].id}", json=item
        ).json()
        assert no_through_category == {"items": [{"name": "test"}], "name": "test cat"}
@@ -228,7 +228,7 @@ def test_binary_error_without_length_model_definition():
                database = database
                metadata = metadata

            test: bytes = ormar.LargeBinary(primary_key=True)
            test: bytes = ormar.LargeBinary(primary_key=True, max_length=-1)


@typing.no_type_check
@@ -241,7 +241,7 @@ def test_string_error_in_model_definition():
                database = database
                metadata = metadata

            test: str = ormar.String(primary_key=True)
            test: str = ormar.String(primary_key=True, max_length=0)


@typing.no_type_check