finish implementing values, add missing docstrings and docs
@@ -28,6 +28,7 @@ class BaseMeta(ormar.ModelMeta):
 # id = ormar.Integer(primary_key=True) # <= notice no field types
 # name = ormar.String(max_length=100)
 
+
 class Author(ormar.Model):
     class Meta(BaseMeta):
         tablename = "authors"
@@ -62,15 +63,9 @@ async def create():
     # Create some records to work with through QuerySet.create method.
     # Note that queryset is exposed on each Model's class as objects
     tolkien = await Author.objects.create(name="J.R.R. Tolkien")
-    await Book.objects.create(author=tolkien,
-                              title="The Hobbit",
-                              year=1937)
-    await Book.objects.create(author=tolkien,
-                              title="The Lord of the Rings",
-                              year=1955)
-    await Book.objects.create(author=tolkien,
-                              title="The Silmarillion",
-                              year=1977)
+    await Book.objects.create(author=tolkien, title="The Hobbit", year=1937)
+    await Book.objects.create(author=tolkien, title="The Lord of the Rings", year=1955)
+    await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)
 
     # alternative creation of object divided into 2 steps
     sapkowski = Author(name="Andrzej Sapkowski")
@@ -169,9 +164,7 @@ async def delete():
     # note that despite the fact that record no longer exists in database
     # the object above is still accessible and you can use it (and i.e. save()) again.
     tolkien = silmarillion.author
-    await Book.objects.create(author=tolkien,
-                              title="The Silmarillion",
-                              year=1977)
+    await Book.objects.create(author=tolkien, title="The Silmarillion", year=1977)
 
 
 async def joins():
@@ -223,11 +216,17 @@ async def filter_and_sort():
     # to sort decreasing use hyphen before the field name
     # same as with filter you can use double underscores to access related fields
     # Django style
-    books = await Book.objects.filter(author__name__icontains="tolkien").order_by(
-        "-year").all()
+    books = (
+        await Book.objects.filter(author__name__icontains="tolkien")
+        .order_by("-year")
+        .all()
+    )
     # python style
-    books = await Book.objects.filter(Book.author.name.icontains("tolkien")).order_by(
-        Book.year.desc()).all()
+    books = (
+        await Book.objects.filter(Book.author.name.icontains("tolkien"))
+        .order_by(Book.year.desc())
+        .all()
+    )
     assert len(books) == 3
     assert books[0].title == "The Silmarillion"
     assert books[2].title == "The Hobbit"
@@ -329,17 +328,26 @@ async def with_connect(function):
     # in your endpoints but have a global connection pool
     # check https://collerek.github.io/ormar/fastapi/ and section with db connection
 
+
 # gather and execute all functions
 # note - normally import should be at the beginning of the file
 import asyncio
 
 # note that normally you use gather() function to run several functions
 # concurrently but we actually modify the data and we rely on the order of functions
-for func in [create, read, update, delete, joins,
-             filter_and_sort, subset_of_columns,
-             pagination, aggregations]:
+for func in [
+    create,
+    read,
+    update,
+    delete,
+    joins,
+    filter_and_sort,
+    subset_of_columns,
+    pagination,
+    aggregations,
+]:
     print(f"Executing: {func.__name__}")
     asyncio.run(with_connect(func))
 
 # drop the database tables
 metadata.drop_all(engine)
@@ -92,8 +92,6 @@ class ExcludableItems:
         Returns count of include items inside
         """
         count = 0
-        if not self.items:
-            return count
         for key in self.items.keys():
             count += len(self.items[key].include)
         return count
@@ -559,6 +559,7 @@ class QuerySet(Generic[T]):
     async def values(
         self,
         fields: Union[List, str, Set, Dict] = None,
+        exclude_through: bool = False,
         _as_dict: bool = True,
         _flatten: bool = False,
     ) -> List:
@@ -571,6 +572,8 @@ class QuerySet(Generic[T]):
 
         Note that it always return a list even for one row from database.
 
+        :param exclude_through: flag if through models should be excluded
+        :type exclude_through: bool
         :param _flatten: internal parameter to flatten one element tuples
         :type _flatten: bool
         :param _as_dict: internal parameter if return dict or tuples
@@ -580,7 +583,7 @@ class QuerySet(Generic[T]):
         """
         if fields:
             return await self.fields(columns=fields).values(
-                _as_dict=_as_dict, _flatten=_flatten
+                _as_dict=_as_dict, _flatten=_flatten, exclude_through=exclude_through
             )
         expr = self.build_select_expression()
         rows = await self.database.fetch_all(expr)
@@ -589,7 +592,8 @@ class QuerySet(Generic[T]):
         alias_resolver = ReverseAliasResolver(
             select_related=self._select_related,
             excludable=self._excludable,
-            model_cls=self.model_cls,
+            model_cls=self.model_cls,  # type: ignore
+            exclude_through=exclude_through,
         )
         column_map = alias_resolver.resolve_columns(columns_names=list(rows[0].keys()))
         result = [
@@ -606,7 +610,10 @@ class QuerySet(Generic[T]):
         return tuple_result if not _flatten else [x[0] for x in tuple_result]
 
     async def values_list(
-        self, fields: Union[List, str, Set, Dict] = None, flatten: bool = False
+        self,
+        fields: Union[List, str, Set, Dict] = None,
+        flatten: bool = False,
+        exclude_through: bool = False,
     ) -> List:
         """
         Return a list of tuples with column values in order of the fields passed or
@@ -620,12 +627,19 @@ class QuerySet(Generic[T]):
 
         Note that it always return a list even for one row from database.
 
+        :param exclude_through: flag if through models should be excluded
+        :type exclude_through: bool
         :param fields: field name or list of field names to extract from db
         :type fields: Union[str, List[str]]
         :param flatten: when one field is passed you can flatten the list of tuples
         :type flatten: bool
         """
-        return await self.values(fields=fields, _as_dict=False, _flatten=flatten)
+        return await self.values(
+            fields=fields,
+            exclude_through=exclude_through,
+            _as_dict=False,
+            _flatten=flatten,
+        )
 
     async def exists(self) -> bool:
         """
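A minimal usage sketch of the new exclude_through flag (illustration only, not part of this diff; the models below are invented for the example, while the tests added further down exercise the real behaviour on the test models):

    import databases
    import ormar
    import sqlalchemy

    database = databases.Database("sqlite:///db.sqlite")
    metadata = sqlalchemy.MetaData()


    class Role(ormar.Model):
        class Meta(ormar.ModelMeta):
            database = database
            metadata = metadata

        id: int = ormar.Integer(primary_key=True)
        name: str = ormar.String(max_length=100)


    class User(ormar.Model):
        class Meta(ormar.ModelMeta):
            database = database
            metadata = metadata

        id: int = ormar.Integer(primary_key=True)
        name: str = ormar.String(max_length=100)
        roles = ormar.ManyToMany(Role)


    async def demo() -> None:
        # by default values() also returns the columns coming from the
        # auto-generated m2m through table of the roles relation
        rows = await User.objects.select_related("roles").values()
        # with the new flag the through-model columns are skipped entirely
        rows = await User.objects.select_related("roles").values(exclude_through=True)
        # values_list() simply forwards the flag to values(), per the hunk above
        tuples = await User.objects.select_related("roles").values_list(
            exclude_through=True
        )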
@@ -1,8 +1,8 @@
-from typing import Dict, List, TYPE_CHECKING, Tuple, Type
+from typing import Dict, List, TYPE_CHECKING, Type, cast
 
-if TYPE_CHECKING:
-    from ormar import Model
-    from ormar.models.excludable import ExcludableItems
+if TYPE_CHECKING:  # pragma: no cover
+    from ormar import ForeignKeyField, Model
+    from ormar.models.excludable import Excludable, ExcludableItems
 
 
 class ReverseAliasResolver:
@@ -11,33 +11,28 @@ class ReverseAliasResolver:
         model_cls: Type["Model"],
         excludable: "ExcludableItems",
         select_related: List[str],
+        exclude_through: bool = False,
     ) -> None:
         self.select_related = select_related
         self.model_cls = model_cls
         self.reversed_aliases = self.model_cls.Meta.alias_manager.reversed_aliases
         self.excludable = excludable
+        self.exclude_through = exclude_through
+
+        self._fields: Dict[str, "ForeignKeyField"] = dict()
+        self._prefixes: Dict[str, str] = dict()
+        self._previous_prefixes: List[str] = [""]
+        self._resolved_names: Dict[str, str] = dict()
 
     def resolve_columns(self, columns_names: List[str]) -> Dict:
-        resolved_names = dict()
-        prefixes, target_models = self._create_prefixes_map()
+        self._create_prefixes_map()
         for column_name in columns_names:
             column_parts = column_name.split("_")
             potential_prefix = column_parts[0]
             if potential_prefix in self.reversed_aliases:
-                relation = self.reversed_aliases[potential_prefix]
-                relation_str = prefixes[relation]
-                target_model = target_models[relation]
-                allowed_columns = target_model.own_table_columns(
-                    model=target_model,
-                    excludable=self.excludable,
-                    alias=potential_prefix,
-                    add_pk_columns=False,
+                self._resolve_column_with_prefix(
+                    column_name=column_name, prefix=potential_prefix
                 )
-                new_column_name = column_name.replace(f"{potential_prefix}_", "")
-                if new_column_name in allowed_columns:
-                    resolved_names[column_name] = column_name.replace(
-                        f"{potential_prefix}_", f"{relation_str}__"
-                    )
             else:
                 allowed_columns = self.model_cls.own_table_columns(
                     model=self.model_cls,
@@ -45,35 +40,96 @@ class ReverseAliasResolver:
                     add_pk_columns=False,
                 )
                 if column_name in allowed_columns:
-                    resolved_names[column_name] = column_name
+                    self._resolved_names[column_name] = column_name
 
-        return resolved_names
+        return self._resolved_names
 
-    def _create_prefixes_map(self) -> Tuple[Dict, Dict]:
-        prefixes: Dict = dict()
-        target_models: Dict = dict()
+    def _resolve_column_with_prefix(self, column_name: str, prefix: str) -> None:
+        relation = self.reversed_aliases.get(prefix, None)
+        relation_str = self._prefixes.get(relation, None)
+        field = self._fields.get(relation, None)
+        if relation_str is None or field is None:
+            return
+        is_through = field.is_multi and field.through.get_name() in relation_str
+        if self._check_if_field_is_excluded(
+            prefix=prefix, field=field, is_through=is_through
+        ):
+            return
+
+        target_model = field.through if is_through else field.to
+        allowed_columns = target_model.own_table_columns(
+            model=target_model,
+            excludable=self.excludable,
+            alias=prefix,
+            add_pk_columns=False,
+        )
+        new_column_name = column_name.replace(f"{prefix}_", "")
+        if new_column_name in allowed_columns:
+            self._resolved_names[column_name] = column_name.replace(
+                f"{prefix}_", f"{relation_str}__"
+            )
+
+    def _check_if_field_is_excluded(
+        self, prefix: str, field: "ForeignKeyField", is_through: bool
+    ) -> bool:
+        shift, field_name = 1, field.name
+        if is_through:
+            field_name = field.through.get_name()
+        elif field.is_multi:
+            shift = 2
+        previous_excludable = self._get_previous_excludable(
+            prefix=prefix, field=field, shift=shift
+        )
+        return previous_excludable.is_excluded(field_name)
+
+    def _get_previous_excludable(
+        self, prefix: str, field: "ForeignKeyField", shift: int = 1
+    ) -> "Excludable":
+        if prefix not in self._previous_prefixes:
+            self._previous_prefixes.append(prefix)
+        previous_prefix_ind = self._previous_prefixes.index(prefix)
+        previous_prefix = (
+            self._previous_prefixes[previous_prefix_ind - shift]
+            if previous_prefix_ind > (shift - 1)
+            else ""
+        )
+        return self.excludable.get(field.owner, alias=previous_prefix)
+
+    def _create_prefixes_map(self) -> None:
         for related in self.select_related:
             model_cls = self.model_cls
             related_split = related.split("__")
             related_str = ""
-            for related in related_split:
-                prefix_name = f"{model_cls.get_name()}_{related}"
-                new_related_str = (f"{related_str}__" if related_str else "") + related
-                prefixes[prefix_name] = new_related_str
-                field = model_cls.Meta.model_fields[related]
-                target_models[prefix_name] = field.to
-                if field.is_multi:
-                    target_models[prefix_name] = field.through
-                    new_through_str = (
-                        f"{related_str}__" if related_str else ""
-                    ) + field.through.get_name()
-                    prefixes[prefix_name] = new_through_str
-                    prefix_name = (
-                        f"{field.through.get_name()}_"
-                        f"{field.default_target_field_name()}"
-                    )
-                    prefixes[prefix_name] = new_related_str
-                    target_models[prefix_name] = field.to
+            for relation in related_split:
+                previous_related_str = f"{related_str}__" if related_str else ""
+                new_related_str = previous_related_str + relation
+                field = model_cls.Meta.model_fields[relation]
+                field = cast("ForeignKeyField", field)
+                prefix_name = self._handle_through_fields_and_prefix(
+                    model_cls=model_cls,
+                    field=field,
+                    previous_related_str=previous_related_str,
+                    relation=relation,
+                )
+                self._prefixes[prefix_name] = new_related_str
+                self._fields[prefix_name] = field
                 model_cls = field.to
                 related_str = new_related_str
-        return prefixes, target_models
+
+    def _handle_through_fields_and_prefix(
+        self,
+        model_cls: Type["Model"],
+        field: "ForeignKeyField",
+        previous_related_str: str,
+        relation: str,
+    ) -> str:
+        prefix_name = f"{model_cls.get_name()}_{relation}"
+        if field.is_multi:
+            through_name = field.through.get_name()
+            if not self.exclude_through:
+                self._fields[prefix_name] = field
+            new_through_str = previous_related_str + through_name
+            self._prefixes[prefix_name] = new_through_str
+            prefix_name = f"{through_name}_{field.default_target_field_name()}"
+        return prefix_name
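For orientation, a rough sketch of what the reworked resolver produces (illustration only, not part of this diff; the alias strings below are invented, real prefixes come from the model's alias_manager):

    # the database returns related columns keyed as "<table_alias>_<column>",
    # and plain names for the root model
    raw_columns = ["name", "ab12x_name", "cd34y_name"]

    # resolve_columns() builds a rename map back to readable relation paths:
    column_map = {
        "name": "name",
        "ab12x_name": "users__name",
        "cd34y_name": "users__categories__name",
    }
    # with exclude_through=True the prefixes of m2m through models are never
    # registered in _prefixes/_fields, so their columns never enter the map
    # and values()/values_list() drop them from the results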
setup.py (22 lines changed)
@@ -45,8 +45,15 @@ setup(
     description="A simple async ORM with fastapi in mind and pydantic validation.",
     long_description=get_long_description(),
     long_description_content_type="text/markdown",
-    keywords=['orm', 'sqlalchemy', 'fastapi', 'pydantic', 'databases', 'async',
-              'alembic'],
+    keywords=[
+        "orm",
+        "sqlalchemy",
+        "fastapi",
+        "pydantic",
+        "databases",
+        "async",
+        "alembic",
+    ],
     author="Radosław Drążkiewicz",
     author_email="collerek@gmail.com",
     packages=get_packages(PACKAGE),
@@ -55,15 +62,18 @@ setup(
     zip_safe=False,
     python_requires=">=3.6",
     data_files=[("", ["LICENSE.md"])],
-    install_requires=["databases>=0.3.2,<=0.4.1", "pydantic>=1.6.1,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<=1.8.2",
-                      "sqlalchemy>=1.3.18,<=1.3.23",
-                      "typing_extensions>=3.7,<=3.7.4.3"],
+    install_requires=[
+        "databases>=0.3.2,<=0.4.1",
+        "pydantic>=1.6.1,!=1.7,!=1.7.1,!=1.7.2,!=1.7.3,!=1.8,!=1.8.1,<=1.8.2",
+        "sqlalchemy>=1.3.18,<=1.3.23",
+        "typing_extensions>=3.7,<=3.7.4.3",
+    ],
     extras_require={
         "postgresql": ["asyncpg", "psycopg2"],
         "mysql": ["aiomysql", "pymysql"],
         "sqlite": ["aiosqlite"],
         "orjson": ["orjson"],
-        "crypto": ["cryptography"]
+        "crypto": ["cryptography"],
     },
     classifiers=[
         "Development Status :: 4 - Beta",
Binary file not shown.
@@ -79,7 +79,7 @@ def create_test_database():
     metadata.drop_all(engine)
 
 
-@pytest.yield_fixture(scope="module")
+@pytest.fixture(scope="module")
 def event_loop():
     loop = asyncio.get_event_loop_policy().new_event_loop()
     yield loop
@@ -6,6 +6,7 @@ import pytest
 import sqlalchemy
 
 import ormar
+from ormar.exceptions import QueryDefinitionError
 from tests.settings import DATABASE_URL
 
 database = databases.Database(DATABASE_URL)
@@ -61,7 +62,7 @@ def create_test_database():
     metadata.drop_all(engine)
 
 
-@pytest.yield_fixture(scope="module")
+@pytest.fixture(scope="module")
 def event_loop():
     loop = asyncio.get_event_loop_policy().new_event_loop()
     yield loop
@@ -256,13 +257,13 @@ async def test_nested_m2m_values():
 
 
 @pytest.mark.asyncio
-async def test_nested_m2m_values_subset_of_fields():
+async def test_nested_m2m_values_without_through_explicit():
     async with database:
         user = (
             await Role.objects.select_related("users__categories")
             .filter(name="admin")
             .fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
-            .exclude_fields("users__roleuser")
+            .exclude_fields("roleuser")
             .values()
         )
         assert user == [
@@ -272,3 +273,85 @@ async def test_nested_m2m_values_subset_of_fields():
                 "users__categories__name": "News",
             }
         ]
+
+
+@pytest.mark.asyncio
+async def test_nested_m2m_values_without_through_param():
+    async with database:
+        user = (
+            await Role.objects.select_related("users__categories")
+            .filter(name="admin")
+            .fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
+            .values(exclude_through=True)
+        )
+        assert user == [
+            {
+                "name": "admin",
+                "users__name": "Anonymous",
+                "users__categories__name": "News",
+            }
+        ]
+
+
+@pytest.mark.asyncio
+async def test_nested_m2m_values_no_through_and_m2m_models_but_keep_end_model():
+    async with database:
+        user = (
+            await Role.objects.select_related("users__categories")
+            .filter(name="admin")
+            .fields({"name": ..., "users": {"name": ..., "categories": {"name"}}})
+            .exclude_fields(["roleuser", "users"])
+            .values()
+        )
+        assert user == [{"name": "admin", "users__categories__name": "News"}]
+
+
+@pytest.mark.asyncio
+async def test_nested_flatten_and_exception():
+    async with database:
+        with pytest.raises(QueryDefinitionError):
+            (await Role.objects.fields({"name", "id"}).values_list(flatten=True))
+
+        roles = await Role.objects.fields("name").values_list(flatten=True)
+        assert roles == ["admin", "editor"]
+
+
+@pytest.mark.asyncio
+async def test_empty_result():
+    async with database:
+        roles = await Role.objects.filter(Role.name == "test").values_list()
+        roles2 = await Role.objects.filter(Role.name == "test").values()
+        assert roles == roles2 == []
+
+
+@pytest.mark.asyncio
+async def test_queryset_values_multiple_select_related():
+    async with database:
+        posts = (
+            await Category.objects.select_related(["created_by__roles", "posts"])
+            .filter(Category.created_by.roles.name == "editor")
+            .values(
+                ["name", "posts__name", "created_by__name", "created_by__roles__name"],
+                exclude_through=True,
+            )
+        )
+        assert posts == [
+            {
+                "name": "News",
+                "created_by__name": "Anonymous",
+                "created_by__roles__name": "editor",
+                "posts__name": "Ormar strikes again!",
+            },
+            {
+                "name": "News",
+                "created_by__name": "Anonymous",
+                "created_by__roles__name": "editor",
+                "posts__name": "Why don't you use ormar yet?",
+            },
+            {
+                "name": "News",
+                "created_by__name": "Anonymous",
+                "created_by__roles__name": "editor",
+                "posts__name": "Check this out, ormar now for free",
+            },
+        ]