diff --git a/ormar/fields/foreign_key.py b/ormar/fields/foreign_key.py index a867c39..d347454 100644 --- a/ormar/fields/foreign_key.py +++ b/ormar/fields/foreign_key.py @@ -364,6 +364,64 @@ class ForeignKeyField(BaseField): prefix = "to_" if self.self_reference else "" return self.through_relation_name or f"{prefix}{self.owner.get_name()}" + def get_filter_clause_target(self) -> Type["Model"]: + return self.to + + def get_model_relation_fields(self, use_alias: bool = False) -> str: + """ + Extract names of the database columns or model fields that are connected + with given relation based on use_alias switch and which side of the relation + the current field is - reverse or normal. + + :param use_alias: use db names aliases or model fields + :type use_alias: bool + :return: name or names of the related columns/ fields + :rtype: Union[str, List[str]] + """ + if use_alias: + return self._get_model_relation_fields_alias() + return self._get_model_relation_fields_name() + + def _get_model_relation_fields_name(self) -> str: + if self.virtual: + return self.owner.ormar_config.pkname + return self.name + + def _get_model_relation_fields_alias(self) -> str: + if self.virtual: + return self.owner.ormar_config.model_fields[ + self.owner.ormar_config.pkname + ].get_alias() + return self.get_alias() + + def get_related_field_alias(self) -> str: + """ + Extract names of the related database columns or that are connected + with given relation based to use as a target in filter clause. + + :return: name or names of the related columns/ fields + :rtype: Union[str, Dict[str, str]] + """ + if self.virtual: + field_name = self.get_related_name() + field = self.to.ormar_config.model_fields[field_name] + return field.get_alias() + target_field = self.to.get_column_alias(self.to.ormar_config.pkname) + return target_field + + def get_related_field_name(self) -> Union[str, List[str]]: + """ + Returns name of the relation field that should be used in prefetch query. 
+ This field is later used to register relation in prefetch query, + populate relations dict, and populate nested model in prefetch query. + + :return: name(s) of the field + :rtype: Union[str, List[str]] + """ + if self.virtual: + return self.get_related_name() + return self.to.ormar_config.pkname + def _evaluate_forward_ref( self, globalns: Any, localns: Any, is_through: bool = False ) -> None: diff --git a/ormar/fields/many_to_many.py b/ormar/fields/many_to_many.py index 1a8af2d..329d334 100644 --- a/ormar/fields/many_to_many.py +++ b/ormar/fields/many_to_many.py @@ -268,6 +268,51 @@ class ManyToManyField( # type: ignore """ return self.through + def get_filter_clause_target(self) -> Type["Model"]: + return self.through + + def get_model_relation_fields(self, use_alias: bool = False) -> str: + """ + Extract names of the database columns or model fields that are connected + with given relation based on use_alias switch. + + :param use_alias: use db names aliases or model fields + :type use_alias: bool + :return: name or names of the related columns/ fields + :rtype: Union[str, List[str]] + """ + pk_field = self.owner.ormar_config.model_fields[self.owner.ormar_config.pkname] + result = pk_field.get_alias() if use_alias else pk_field.name + return result + + def get_related_field_alias(self) -> str: + """ + Extract names of the related database columns or that are connected + with given relation based to use as a target in filter clause. + + :return: name or names of the related columns/ fields + :rtype: Union[str, Dict[str, str]] + """ + if self.self_reference and self.self_reference_primary == self.name: + field_name = self.default_target_field_name() + else: + field_name = self.default_source_field_name() + sub_field = self.through.ormar_config.model_fields[field_name] + return sub_field.get_alias() + + def get_related_field_name(self) -> Union[str, List[str]]: + """ + Returns name of the relation field that should be used in prefetch query. 
+ This field is later used to register relation in prefetch query, + populate relations dict, and populate nested model in prefetch query. + + :return: name(s) of the field + :rtype: Union[str, List[str]] + """ + if self.self_reference and self.self_reference_primary == self.name: + return self.default_target_field_name() + return self.default_source_field_name() + def create_default_through_model(self) -> None: """ Creates default empty through model if no additional fields are required. diff --git a/ormar/models/mixins/__init__.py b/ormar/models/mixins/__init__.py index edb5852..1817969 100644 --- a/ormar/models/mixins/__init__.py +++ b/ormar/models/mixins/__init__.py @@ -8,14 +8,12 @@ it became quite complicated over time. from ormar.models.mixins.alias_mixin import AliasMixin from ormar.models.mixins.excludable_mixin import ExcludableMixin from ormar.models.mixins.merge_mixin import MergeModelMixin -from ormar.models.mixins.prefetch_mixin import PrefetchQueryMixin from ormar.models.mixins.pydantic_mixin import PydanticMixin from ormar.models.mixins.save_mixin import SavePrepareMixin __all__ = [ "MergeModelMixin", "AliasMixin", - "PrefetchQueryMixin", "SavePrepareMixin", "ExcludableMixin", "PydanticMixin", diff --git a/ormar/models/mixins/prefetch_mixin.py b/ormar/models/mixins/prefetch_mixin.py deleted file mode 100644 index 61eb29d..0000000 --- a/ormar/models/mixins/prefetch_mixin.py +++ /dev/null @@ -1,123 +0,0 @@ -from typing import TYPE_CHECKING, Callable, Dict, List, Tuple, Type, cast - -from ormar.models.mixins.relation_mixin import RelationMixin - -if TYPE_CHECKING: # pragma: no cover - from ormar.fields import ForeignKeyField, ManyToManyField - - -class PrefetchQueryMixin(RelationMixin): - """ - Used in PrefetchQuery to extract ids and names of models to prefetch. 
- """ - - if TYPE_CHECKING: # pragma no cover - from ormar import Model - - get_name: Callable # defined in NewBaseModel - - @staticmethod - def get_clause_target_and_filter_column_name( - parent_model: Type["Model"], - target_model: Type["Model"], - reverse: bool, - related: str, - ) -> Tuple[Type["Model"], str]: - """ - Returns Model on which query clause should be performed and name of the column. - - :param parent_model: related model that the relation lead to - :type parent_model: Type[Model] - :param target_model: model on which query should be performed - :type target_model: Type[Model] - :param reverse: flag if the relation is reverse - :type reverse: bool - :param related: name of the relation field - :type related: str - :return: Model on which query clause should be performed and name of the column - :rtype: Tuple[Type[Model], str] - """ - if reverse: - field_name = parent_model.ormar_config.model_fields[ - related - ].get_related_name() - field = target_model.ormar_config.model_fields[field_name] - if field.is_multi: - field = cast("ManyToManyField", field) - field_name = field.default_target_field_name() - sub_field = field.through.ormar_config.model_fields[field_name] - return field.through, sub_field.get_alias() - return target_model, field.get_alias() - target_field = target_model.get_column_alias(target_model.ormar_config.pkname) - return target_model, target_field - - @staticmethod - def get_column_name_for_id_extraction( - parent_model: Type["Model"], reverse: bool, related: str, use_raw: bool - ) -> str: - """ - Returns name of the column that should be used to extract ids from model. - Depending on the relation side it's either primary key column of parent model - or field name specified by related parameter. 
- - :param parent_model: model from which id column should be extracted - :type parent_model: Type[Model] - :param reverse: flag if the relation is reverse - :type reverse: bool - :param related: name of the relation field - :type related: str - :param use_raw: flag if aliases or field names should be used - :type use_raw: bool - :return: - :rtype: - """ - if reverse: - column_name = parent_model.ormar_config.pkname - return ( - parent_model.get_column_alias(column_name) if use_raw else column_name - ) - column = parent_model.ormar_config.model_fields[related] - return column.get_alias() if use_raw else column.name - - @classmethod - def get_related_field_name(cls, target_field: "ForeignKeyField") -> str: - """ - Returns name of the relation field that should be used in prefetch query. - This field is later used to register relation in prefetch query, - populate relations dict, and populate nested model in prefetch query. - - :param target_field: relation field that should be used in prefetch - :type target_field: Type[BaseField] - :return: name of the field - :rtype: str - """ - if target_field.is_multi: - return cls.get_name() - if target_field.virtual: - return target_field.get_related_name() - return target_field.to.ormar_config.pkname - - @classmethod - def get_filtered_names_to_extract(cls, prefetch_dict: Dict) -> List: - """ - Returns list of related fields names that should be followed to prefetch related - models from. - - List of models is translated into dict to assure each model is extracted only - once in one query, that's why this function accepts prefetch_dict not list. - - Only relations from current model are returned. 
- - :param prefetch_dict: dictionary of fields to extract - :type prefetch_dict: Dict - :return: list of fields names to extract - :rtype: List - """ - related_to_extract = [] - if prefetch_dict and prefetch_dict is not Ellipsis: - related_to_extract = [ - related - for related in cls.extract_related_names() - if related in prefetch_dict - ] - return related_to_extract diff --git a/ormar/models/modelproxy.py b/ormar/models/modelproxy.py index bcbd685..23654f7 100644 --- a/ormar/models/modelproxy.py +++ b/ormar/models/modelproxy.py @@ -1,14 +1,12 @@ from ormar.models.mixins import ( ExcludableMixin, MergeModelMixin, - PrefetchQueryMixin, PydanticMixin, SavePrepareMixin, ) class ModelTableProxy( - PrefetchQueryMixin, MergeModelMixin, SavePrepareMixin, ExcludableMixin, diff --git a/ormar/models/newbasemodel.py b/ormar/models/newbasemodel.py index 77abe47..a4f6ab2 100644 --- a/ormar/models/newbasemodel.py +++ b/ormar/models/newbasemodel.py @@ -25,7 +25,6 @@ import typing_extensions import ormar # noqa I100 from ormar.exceptions import ModelError, ModelPersistenceError -from ormar.fields import BaseField from ormar.fields.foreign_key import ForeignKeyField from ormar.fields.parsers import decode_bytes, encode_json from ormar.models.helpers import register_relation_in_alias_manager @@ -1167,18 +1166,3 @@ class NewBaseModel(pydantic.BaseModel, ModelTableProxy, metaclass=ModelMetaclass f"model without pk set!" ) return self_fields - - def get_relation_model_id(self, target_field: "BaseField") -> Optional[int]: - """ - Returns an id of the relation side model to use in prefetch query. 
- - :param target_field: field with relation definition - :type target_field: "BaseField" - :return: value of pk if set - :rtype: Optional[int] - """ - if target_field.virtual or target_field.is_multi: - return self.pk - related_name = target_field.name - related_model = getattr(self, related_name) - return None if not related_model else related_model.pk diff --git a/ormar/queryset/queries/prefetch_query.py b/ormar/queryset/queries/prefetch_query.py index 66c2e47..eaba8ee 100644 --- a/ormar/queryset/queries/prefetch_query.py +++ b/ormar/queryset/queries/prefetch_query.py @@ -1,84 +1,484 @@ -from typing import TYPE_CHECKING, Dict, List, Sequence, Set, Tuple, Type, cast +import abc +import logging +from abc import abstractmethod +from typing import ( + TYPE_CHECKING, + Any, + Dict, + List, + Sequence, + Tuple, + Type, + Union, + cast, +) -import ormar +import ormar # noqa: I100, I202 from ormar.queryset.clause import QueryClause from ormar.queryset.queries.query import Query -from ormar.queryset.utils import extract_models_to_dict_of_lists, translate_list_to_dict +from ormar.queryset.utils import translate_list_to_dict if TYPE_CHECKING: # pragma: no cover - from ormar import Model - from ormar.fields import BaseField, ForeignKeyField + from ormar import ForeignKeyField, Model from ormar.models.excludable import ExcludableItems - from ormar.queryset import OrderAction + from ormar.queryset import FilterAction, OrderAction + +logger = logging.getLogger(__name__) -def sort_models(models: List["Model"], orders_by: Dict) -> List["Model"]: +class UniqueList(list): """ - Since prefetch query gets all related models by ids the sorting needs to happen in - python. Since by default models are already sorted by id here we resort only if - order_by parameters was set. 
- - :param models: list of models already fetched from db - :type models: List[tests.test_prefetch_related.Division] - :param orders_by: order by dictionary - :type orders_by: Dict[str, str] - :return: sorted list of models - :rtype: List[tests.test_prefetch_related.Division] + Simple subclass of list that prevents the duplicates + Cannot use set as the order is important """ - sort_criteria = [ - (key, value) for key, value in orders_by.items() if isinstance(value, str) - ] - sort_criteria = sort_criteria[::-1] - for criteria in sort_criteria: - key_name, value = criteria - if value == "desc": - models.sort(key=lambda x: getattr(x, key_name), reverse=True) + + def append(self, item: Any) -> None: + if item not in self: + super().append(item) + + +class Node(abc.ABC): + """ + Base Node use to build a query tree and divide job into already loaded models + and the ones that still need to be fetched from database + """ + + def __init__(self, relation_field: "ForeignKeyField", parent: "Node") -> None: + self.parent = parent + self.children: List["Node"] = [] + if self.parent: + self.parent.children.append(self) + self.relation_field = relation_field + self.table_prefix = "" + self.rows: List = [] + self.models: List["Model"] = [] + self.use_alias: bool = False + + @property + def target_name(self) -> str: + """ + Return the name of the relation that is used to + fetch excludes/includes from the excludable mixin + as well as specifying the target to join in m2m relations + + :return: name of the relation + :rtype: str + """ + if ( + self.relation_field.self_reference + and self.relation_field.self_reference_primary == self.relation_field.name + ): + return self.relation_field.default_source_field_name() else: - models.sort(key=lambda x: getattr(x, key_name)) - return models + return self.relation_field.default_target_field_name() + + @abstractmethod + def extract_related_ids(self, column_name: str) -> List: # pragma: no cover + pass + + @abstractmethod + def 
reload_tree(self) -> None: # pragma: no cover + pass + + @abstractmethod + async def load_data(self) -> None: # pragma: no cover + pass + + def get_filter_for_prefetch(self) -> List["FilterAction"]: + """ + Populates where clause with condition to return only models within the + set of extracted ids. + If there are no ids for relation the empty list is returned. + + :return: list of filter clauses based on original models + :rtype: List[sqlalchemy.sql.elements.TextClause] + """ + column_name = self.relation_field.get_model_relation_fields( + self.parent.use_alias + ) + + ids = self.parent.extract_related_ids(column_name=column_name) + + if ids: + return self._prepare_filter_clauses(ids=ids) + return [] + + def _prepare_filter_clauses(self, ids: List) -> List["FilterAction"]: + """ + Gets the list of ids and construct a list of filter queries on + extracted appropriate column names + + :param ids: list of ids that should be used to fetch data + :type ids: List + :return: list of filter actions to use in query + :rtype: List["FilterAction"] + """ + clause_target = self.relation_field.get_filter_clause_target() + filter_column = self.relation_field.get_related_field_alias() + qryclause = QueryClause( + model_cls=clause_target, + select_related=[], + filter_clauses=[], + ) + kwargs = {f"{cast(str, filter_column)}__in": ids} + filter_clauses, _ = qryclause.prepare_filter(_own_only=False, **kwargs) + return filter_clauses -def set_children_on_model( # noqa: CCR001 - model: "Model", - related: str, - children: Dict, - model_id: int, - models: Dict, - orders_by: Dict, -) -> None: +class AlreadyLoadedNode(Node): """ - Extract ids of child models by given relation id key value. - - Based on those ids the actual children model instances are fetched from - already fetched data. - - If needed the child models are resorted according to passed orders_by dict. - - Also relation is registered as each child is set as parent related field name value. 
- - :param model: parent model instance - :type model: Model - :param related: name of the related field - :type related: str - :param children: dictionary of children ids/ related field value - :type children: Dict[int, set] - :param model_id: id of the model on which children should be set - :type model_id: int - :param models: dictionary of child models instances - :type models: Dict - :param orders_by: order_by dictionary - :type orders_by: Dict + Node that was already loaded in select statement """ - for key, child_models in children.items(): - if key == model_id: - models_to_set = [models[child] for child in sorted(child_models)] - if models_to_set: - if orders_by and any(isinstance(x, str) for x in orders_by.values()): - models_to_set = sort_models( - models=models_to_set, orders_by=orders_by - ) - for child in models_to_set: - setattr(model, related, child) + + def __init__(self, relation_field: "ForeignKeyField", parent: "Node") -> None: + super().__init__(relation_field=relation_field, parent=parent) + self.use_alias = False + self._extract_own_models() + + def _extract_own_models(self) -> None: + """ + Extract own models that were already fetched and attached to root node + """ + for model in self.parent.models: + child_models = getattr(model, self.relation_field.name) + if isinstance(child_models, list): + self.models.extend(child_models) + elif child_models: + self.models.append(child_models) + + async def load_data(self) -> None: + """ + Triggers a data load in the child nodes + """ + for child in self.children: + await child.load_data() + + def reload_tree(self) -> None: + """ + After data was loaded we reload whole tree from the bottom + to include freshly loaded nodes + """ + for child in self.children: + child.reload_tree() + + def extract_related_ids(self, column_name: str) -> List: + """ + Extracts the selected column(s) values from own models. + Those values are used to construct filter clauses and populate child models. 
+ + :param column_name: names of the column(s) that holds the relation info + :type column_name: Union[str, List[str]] + :return: List of extracted values of relation columns + :rtype: List + """ + list_of_ids = UniqueList() + for model in self.models: + child = getattr(model, column_name) + if isinstance(child, ormar.Model): + list_of_ids.append(child.pk) + elif child is not None: + list_of_ids.append(child) + return list_of_ids + + +class RootNode(AlreadyLoadedNode): + """ + Root model Node from which both main and prefetch query originated + """ + + def __init__(self, models: List["Model"]) -> None: + self.models = models + self.use_alias = False + self.children = [] + + def reload_tree(self) -> None: + for child in self.children: + child.reload_tree() + + +class LoadNode(Node): + """ + Nodes that actually need to be fetched from database in the prefetch query + """ + + def __init__( + self, + relation_field: "ForeignKeyField", + excludable: "ExcludableItems", + orders_by: List["OrderAction"], + parent: "Node", + source_model: Type["Model"], + ) -> None: + super().__init__(relation_field=relation_field, parent=parent) + self.excludable = excludable + self.exclude_prefix: str = "" + self.orders_by = orders_by + self.use_alias = True + self.grouped_models: Dict[Any, List["Model"]] = dict() + self.source_model = source_model + + async def load_data(self) -> None: + """ + Ensures that at least primary key columns from current model are included in + the query. + + Gets the filter values from the parent model and runs the query. + + Triggers a data load in child tasks. 
+ """ + self._update_excludable_with_related_pks() + if self.relation_field.is_multi: + query_target = self.relation_field.through + select_related = [self.target_name] + else: + query_target = self.relation_field.to + select_related = [] + + filter_clauses = self.get_filter_for_prefetch() + + if filter_clauses: + qry = Query( + model_cls=query_target, + select_related=select_related, + filter_clauses=filter_clauses, + exclude_clauses=[], + offset=None, + limit_count=None, + excludable=self.excludable, + order_bys=self._extract_own_order_bys(), + limit_raw_sql=False, + ) + expr = qry.build_select_expression() + logger.debug( + expr.compile( + dialect=self.source_model.ormar_config.database._backend._dialect, + compile_kwargs={"literal_binds": True}, + ) + ) + self.rows = await query_target.ormar_config.database.fetch_all(expr) + + for child in self.children: + await child.load_data() + + def _update_excludable_with_related_pks(self) -> None: + """ + Makes sure that excludable is populated with own model primary keys values + if the excludable has the exclude/include clauses + """ + related_field_names = self.relation_field.get_related_field_name() + alias_manager = self.relation_field.to.ormar_config.alias_manager + relation_key = self._build_relation_key() + self.exclude_prefix = alias_manager.resolve_relation_alias_after_complex( + source_model=self.source_model, + relation_str=relation_key, + relation_field=self.relation_field, + ) + if self.relation_field.is_multi: + self.table_prefix = self.exclude_prefix + target_model = self.relation_field.to + model_excludable = self.excludable.get( + model_cls=target_model, alias=self.exclude_prefix + ) + # includes nested pks if not included already + for related_name in related_field_names: + if model_excludable.include and not model_excludable.is_included( + related_name + ): + model_excludable.set_values({related_name}, is_exclude=False) + + def _build_relation_string(self) -> str: + node: Union[LoadNode, Node] = self 
+ relation = node.relation_field.name + while not isinstance(node.parent, RootNode): + relation = node.parent.relation_field.name + "__" + relation + node = node.parent + return relation + + def _build_relation_key(self) -> str: + relation_key = self._build_relation_string() + return relation_key + + def _extract_own_order_bys(self) -> List["OrderAction"]: + """ + Extracts list of order actions related to current model. + Since same model can happen multiple times in a tree we check not only the + match on given model but also that path from relation tree matches the + path in order action. + + :return: list of order actions related to current model + :rtype: List[OrderAction] + """ + own_order_bys = [] + own_path = self._get_full_tree_path() + for order_by in self.orders_by: + if ( + order_by.target_model == self.relation_field.to + and order_by.related_str.endswith(f"{own_path}") + ): + order_by.is_source_model_order = True + order_by.table_prefix = self.table_prefix + own_order_bys.append(order_by) + return own_order_bys + + def _get_full_tree_path(self) -> str: + """ + Iterates the nodes to extract path from root node. + + :return: path from root node + :rtype: str + """ + node: Node = self + relation_str = node.relation_field.name + while not isinstance(node.parent, RootNode): + node = node.parent + relation_str = f"{node.relation_field.name}__{relation_str}" + return relation_str + + def extract_related_ids(self, column_name: str) -> List: + """ + Extracts the selected column(s) values from own models. + Those values are used to construct filter clauses and populate child models. 
+ + :param column_names: names of the column(s) that holds the relation info + :type column_names: Union[str, List[str]] + :return: List of extracted values of relation columns + :rtype: List + """ + column_name = self._prefix_column_names_with_table_prefix( + column_name=column_name + ) + return self._extract_simple_relation_keys(column_name=column_name) + + def _prefix_column_names_with_table_prefix(self, column_name: str) -> str: + return (f"{self.table_prefix}_" if self.table_prefix else "") + column_name + + def _extract_simple_relation_keys(self, column_name: str) -> List: + """ + Extracts simple relation keys values. + + :param column_name: names of the column(s) that holds the relation info + :type column_name: str + :return: List of extracted values of relation columns + :rtype: List + """ + list_of_ids = UniqueList() + for row in self.rows: + if row[column_name]: + list_of_ids.append(row[column_name]) + return list_of_ids + + def reload_tree(self) -> None: + """ + Instantiates models from loaded database rows. + Groups those instances by relation key for easy extract per parent. + Triggers same for child nodes and then populates + the parent node with own related models + """ + if self.rows: + self._instantiate_models() + self._group_models_by_relation_key() + for child in self.children: + child.reload_tree() + self._populate_parent_models() + + def _instantiate_models(self) -> None: + """ + Iterates the rows and initializes instances of ormar.Models. + Each model is instantiated only once (they can be duplicates for m2m relation + when multiple parent models refer to same child model since the query have to + also include the through model - hence full rows are unique, but related + models without through models can be not unique). 
+ """ + fields_to_exclude = self.relation_field.to.get_names_to_exclude( + excludable=self.excludable, alias=self.exclude_prefix + ) + parsed_rows: Dict[Tuple, "Model"] = {} + for row in self.rows: + item = self.relation_field.to.extract_prefixed_table_columns( + item={}, + row=row, + table_prefix=self.table_prefix, + excludable=self.excludable, + ) + hashable_item = self._hash_item(item) + instance = parsed_rows.setdefault( + hashable_item, + self.relation_field.to(**item, **{"__excluded__": fields_to_exclude}), + ) + self.models.append(instance) + + def _hash_item(self, item: Dict) -> Tuple: + """ + Converts model dictionary into tuple to make it hashable and allow to use it + as a dictionary key - used to ensure unique instances of related models. + + :param item: instance dictionary + :type item: Dict + :return: tuple out of model dictionary + :rtype: Tuple + """ + result = [] + for key, value in sorted(item.items()): + result.append( + (key, self._hash_item(value) if isinstance(value, dict) else value) + ) + return tuple(result) + + def _group_models_by_relation_key(self) -> None: + """ + Groups own models by relation keys so it's easy later to extract those models + when iterating parent models. Note that order is important as it reflects + order by issued by the user. 
+ """ + relation_key = self.relation_field.get_related_field_alias() + for index, row in enumerate(self.rows): + key = row[relation_key] + current_group = self.grouped_models.setdefault(key, []) + current_group.append(self.models[index]) + + def _populate_parent_models(self) -> None: + """ + Populate parent node models with own child models from grouped dictionary + """ + relation_key = self._get_relation_key_linking_models() + for model in self.parent.models: + children = self._get_own_models_related_to_parent( + model=model, relation_key=relation_key + ) + for child in children: + setattr(model, self.relation_field.name, child) + + def _get_relation_key_linking_models(self) -> Tuple[str, str]: + """ + Extract names and aliases of relation columns to use + in linking between own models and parent models + + :return: tuple of name and alias of relation columns + :rtype: List[Tuple[str, str]] + """ + column_name = self.relation_field.get_model_relation_fields(False) + column_alias = self.relation_field.get_model_relation_fields(True) + return column_name, column_alias + + def _get_own_models_related_to_parent( + self, model: "Model", relation_key: Tuple[str, str] + ) -> List["Model"]: + """ + Extracts related column values from parent and based on this key gets the + own grouped models. 
+ + :param model: parent model from parent node + :type model: Model + :param relation_key: name and aliases linking relations + :type relation_key: List[Tuple[str, str]] + :return: list of own models to set on parent + :rtype: List[Model] + """ + column_name, column_alias = relation_key + model_value = getattr(model, column_name) + if isinstance(model_value, ormar.Model): + model_value = model_value.pk + return self.grouped_models.get(model_value, []) class PrefetchQuery: @@ -97,22 +497,13 @@ class PrefetchQuery: orders_by: List["OrderAction"], ) -> None: self.model = model_cls - self.database = self.model.ormar_config.database - self._prefetch_related = prefetch_related - self._select_related = select_related self.excludable = excludable - self.already_extracted: Dict = dict() - self.models: Dict = {} - self.select_dict = translate_list_to_dict(self._select_related) - self.orders_by = orders_by or [] - # TODO: refactor OrderActions to use it instead of strings from it - self.order_dict = translate_list_to_dict( - [x.query_str for x in self.orders_by], is_order=True - ) + self.select_dict = translate_list_to_dict(select_related, default={}) + self.prefetch_dict = translate_list_to_dict(prefetch_related, default={}) + self.orders_by = orders_by + self.load_tasks: List[Node] = [] - async def prefetch_related( - self, models: Sequence["Model"], rows: List - ) -> Sequence["Model"]: + async def prefetch_related(self, models: Sequence["Model"]) -> Sequence["Model"]: """ Main entry point for prefetch_query. @@ -123,487 +514,63 @@ class PrefetchQuery: Returns list with related models already prefetched and set. 
:param models: list of already instantiated models from main query - :type models: List[Model] + :type models: Sequence[Model] :param rows: row sql result of the main query before the prefetch :type rows: List[sqlalchemy.engine.result.RowProxy] :return: list of models with children prefetched :rtype: List[Model] """ - self.models = extract_models_to_dict_of_lists( - model_type=self.model, models=models, select_dict=self.select_dict + parent_task = RootNode(models=cast(List["Model"], models)) + self._build_load_tree( + prefetch_dict=self.prefetch_dict, + select_dict=self.select_dict, + parent=parent_task, + model=self.model, ) - self.models[self.model.get_name()] = models - return await self._prefetch_related_models(models=models, rows=rows) + await parent_task.load_data() + parent_task.reload_tree() + return parent_task.models - def _extract_ids_from_raw_data( - self, parent_model: Type["Model"], column_name: str - ) -> Set: - """ - Iterates over raw rows and extract id values of relation columns by using - prefixed column name. - - :param parent_model: ormar model class - :type parent_model: Type[Model] - :param column_name: name of the relation column which is a key column - :type column_name: str - :return: set of ids of related model that should be extracted - :rtype: set - """ - list_of_ids = set() - current_data = self.already_extracted.get(parent_model.get_name(), {}) - table_prefix = current_data.get("prefix", "") - column_name = (f"{table_prefix}_" if table_prefix else "") + column_name - for row in current_data.get("raw", []): - if row[column_name]: - list_of_ids.add(row[column_name]) - return list_of_ids - - def _extract_ids_from_preloaded_models( - self, parent_model: Type["Model"], column_name: str - ) -> Set: - """ - Extracts relation ids from already populated models if they were included - in the original query before. 
- - :param parent_model: model from which related ids should be extracted - :type parent_model: Type["Model"] - :param column_name: name of the relation column which is a key column - :type column_name: str - :return: set of ids of related model that should be extracted - :rtype: set - """ - list_of_ids = set() - for model in self.models.get(parent_model.get_name(), []): - child = getattr(model, column_name) - if isinstance(child, ormar.Model): - list_of_ids.add(child.pk) - else: - list_of_ids.add(child) - return list_of_ids - - def _extract_required_ids( - self, parent_model: Type["Model"], reverse: bool, related: str - ) -> Set: - """ - Delegates extraction of the fields to either get ids from raw sql response - or from already populated models. - - :param parent_model: model from which related ids should be extracted - :type parent_model: Type["Model"] - :param reverse: flag if the relation is reverse - :type reverse: bool - :param related: name of the field with relation - :type related: str - :return: set of ids of related model that should be extracted - :rtype: set - """ - use_raw = parent_model.get_name() not in self.models - - column_name = parent_model.get_column_name_for_id_extraction( - parent_model=parent_model, reverse=reverse, related=related, use_raw=use_raw - ) - - if use_raw: - return self._extract_ids_from_raw_data( - parent_model=parent_model, column_name=column_name - ) - - return self._extract_ids_from_preloaded_models( - parent_model=parent_model, column_name=column_name - ) - - def _get_filter_for_prefetch( + def _build_load_tree( self, - parent_model: Type["Model"], - target_model: Type["Model"], - reverse: bool, - related: str, - ) -> List: - """ - Populates where clause with condition to return only models within the - set of extracted ids. - - If there are no ids for relation the empty list is returned. 
- - :param parent_model: model from which related ids should be extracted - :type parent_model: Type["Model"] - :param target_model: model to which relation leads to - :type target_model: Type["Model"] - :param reverse: flag if the relation is reverse - :type reverse: bool - :param related: name of the field with relation - :type related: str - :return: - :rtype: List[sqlalchemy.sql.elements.TextClause] - """ - ids = self._extract_required_ids( - parent_model=parent_model, reverse=reverse, related=related - ) - if ids: - ( - clause_target, - filter_column, - ) = parent_model.get_clause_target_and_filter_column_name( - parent_model=parent_model, - target_model=target_model, - reverse=reverse, - related=related, - ) - qryclause = QueryClause( - model_cls=clause_target, select_related=[], filter_clauses=[] - ) - kwargs = {f"{filter_column}__in": ids} - filter_clauses, _ = qryclause.prepare_filter(_own_only=False, **kwargs) - return filter_clauses - return [] - - def _populate_nested_related( - self, model: "Model", prefetch_dict: Dict, orders_by: Dict - ) -> "Model": - """ - Populates all related models children of parent model that are - included in prefetch query. 
- - :param model: ormar model instance - :type model: Model - :param prefetch_dict: dictionary of models to prefetch - :type prefetch_dict: Dict - :param orders_by: dictionary of order bys - :type orders_by: Dict - :return: model with children populated - :rtype: Model - """ - related_to_extract = model.get_filtered_names_to_extract( - prefetch_dict=prefetch_dict - ) - - for related in related_to_extract: - target_field = model.ormar_config.model_fields[related] - target_field = cast("ForeignKeyField", target_field) - target_model = target_field.to.get_name() - model_id = model.get_relation_model_id(target_field=target_field) - - if model_id is None: # pragma: no cover - continue - - field_name = model.get_related_field_name(target_field=target_field) - - children = self.already_extracted.get(target_model, {}).get(field_name, {}) - models = self.already_extracted.get(target_model, {}).get("pk_models", {}) - set_children_on_model( - model=model, - related=related, - children=children, - model_id=model_id, - models=models, - orders_by=orders_by.get(related, {}), - ) - - return model - - async def _prefetch_related_models( - self, models: Sequence["Model"], rows: List - ) -> Sequence["Model"]: - """ - Main method of the query. - - Translates select nad prefetch list into dictionaries to avoid querying the - same related models multiple times. - - Keeps the list of already extracted models. - - Extracts the related models from the database and later populate all children - on each of the parent models from list. 
- - :param models: list of parent models from main query - :type models: List[Model] - :param rows: raw response from sql query - :type rows: List[sqlalchemy.engine.result.RowProxy] - :return: list of models with prefetch children populated - :rtype: List[Model] - """ - self.already_extracted = {self.model.get_name(): {"raw": rows}} - select_dict = translate_list_to_dict(self._select_related) - prefetch_dict = translate_list_to_dict(self._prefetch_related) - target_model = self.model - orders_by = self.order_dict - for related in prefetch_dict.keys(): - await self._extract_related_models( - related=related, - target_model=target_model, - prefetch_dict=prefetch_dict.get(related, {}), - select_dict=select_dict.get(related, {}), - excludable=self.excludable, - orders_by=orders_by.get(related, {}), - ) - final_models = [] - for model in models: - final_models.append( - self._populate_nested_related( - model=model, prefetch_dict=prefetch_dict, orders_by=self.order_dict - ) - ) - return models - - async def _extract_related_models( # noqa: CFQ002, CCR001 - self, - related: str, - target_model: Type["Model"], - prefetch_dict: Dict, select_dict: Dict, - excludable: "ExcludableItems", - orders_by: Dict, - ) -> None: - """ - Constructs queries with required ids and extracts data with fields that should - be included/excluded. - - Runs the queries against the database and populated dictionaries with ids and - with actual extracted children models. - - Calls itself recurrently to extract deeper nested relations of related model. 
- - :param related: name of the relation - :type related: str - :param target_model: model to which relation leads to - :type target_model: Type[Model] - :param prefetch_dict: prefetch related list converted into dictionary - :type prefetch_dict: Dict - :param select_dict: select related list converted into dictionary - :type select_dict: Dict - :param fields: fields to include - :type fields: Union[Set[Any], Dict[Any, Any], None] - :param exclude_fields: fields to exclude - :type exclude_fields: Union[Set[Any], Dict[Any, Any], None] - :param orders_by: dictionary of order bys clauses - :type orders_by: Dict - :return: None - :rtype: None - """ - target_field = target_model.ormar_config.model_fields[related] - target_field = cast("ForeignKeyField", target_field) - reverse = False - if target_field.virtual or target_field.is_multi: - reverse = True - - parent_model = target_model - - filter_clauses = self._get_filter_for_prefetch( - parent_model=parent_model, - target_model=target_field.to, - reverse=reverse, - related=related, - ) - if not filter_clauses: # related field is empty - return - - already_loaded = select_dict is Ellipsis or related in select_dict - - if not already_loaded: - # If not already loaded with select_related - related_field_name = parent_model.get_related_field_name( - target_field=target_field - ) - table_prefix, exclude_prefix, rows = await self._run_prefetch_query( - target_field=target_field, - excludable=excludable, - filter_clauses=filter_clauses, - related_field_name=related_field_name, - ) - else: - rows = [] - table_prefix = "" - exclude_prefix = "" - - if prefetch_dict and prefetch_dict is not Ellipsis: - for subrelated in prefetch_dict.keys(): - await self._extract_related_models( - related=subrelated, - target_model=target_field.to, - prefetch_dict=prefetch_dict.get(subrelated, {}), - select_dict=self._get_select_related_if_apply( - subrelated, select_dict - ), - excludable=excludable, - 
orders_by=self._get_select_related_if_apply(subrelated, orders_by), - ) - - if not already_loaded: - self._populate_rows( - rows=rows, - parent_model=parent_model, - target_field=target_field, - table_prefix=table_prefix, - exclude_prefix=exclude_prefix, - excludable=excludable, - prefetch_dict=prefetch_dict, - orders_by=orders_by, - ) - else: - self._update_already_loaded_rows( - target_field=target_field, - prefetch_dict=prefetch_dict, - orders_by=orders_by, - ) - - async def _run_prefetch_query( - self, - target_field: "BaseField", - excludable: "ExcludableItems", - filter_clauses: List, - related_field_name: str, - ) -> Tuple[str, str, List]: - """ - Actually runs the queries against the database and populates the raw response - for given related model. - - Returns table prefix as it's later needed to eventually initialize the children - models. - - :param target_field: ormar field with relation definition - :type target_field: "BaseField" - :param filter_clauses: list of clauses, actually one clause with ids of relation - :type filter_clauses: List[sqlalchemy.sql.elements.TextClause] - :return: table prefix and raw rows from sql response - :rtype: Tuple[str, List] - """ - target_model = target_field.to - target_name = target_model.get_name() - select_related = [] - query_target = target_model - table_prefix = "" - exclude_prefix = ( - target_field.to.ormar_config.alias_manager.resolve_relation_alias( - from_model=target_field.owner, relation_name=target_field.name - ) - ) - if target_field.is_multi: - query_target = target_field.through - select_related = [target_name] - table_prefix = ( - target_field.to.ormar_config.alias_manager.resolve_relation_alias( - from_model=query_target, relation_name=target_name - ) - ) - exclude_prefix = table_prefix - self.already_extracted.setdefault(target_name, {})["prefix"] = table_prefix - - model_excludable = excludable.get(model_cls=target_model, alias=exclude_prefix) - if model_excludable.include and not 
model_excludable.is_included( - related_field_name - ): - model_excludable.set_values({related_field_name}, is_exclude=False) - - qry = Query( - model_cls=query_target, - select_related=select_related, - filter_clauses=filter_clauses, - exclude_clauses=[], - offset=None, - limit_count=None, - excludable=excludable, - order_bys=None, - limit_raw_sql=False, - ) - expr = qry.build_select_expression() - # print(expr.compile(compile_kwargs={"literal_binds": True})) - rows = await self.database.fetch_all(expr) - self.already_extracted.setdefault(target_name, {}).update({"raw": rows}) - return table_prefix, exclude_prefix, rows - - @staticmethod - def _get_select_related_if_apply(related: str, select_dict: Dict) -> Dict: - """ - Extract nested related of select_related dictionary to extract models nested - deeper on related model and already loaded in select related query. - - :param related: name of the relation - :type related: str - :param select_dict: dictionary of select related models in main query - :type select_dict: Dict - :return: dictionary with nested related of select related - :rtype: Dict - """ - return ( - select_dict.get(related, {}) - if (select_dict and select_dict is not Ellipsis and related in select_dict) - else {} - ) - - def _update_already_loaded_rows( # noqa: CFQ002 - self, target_field: "BaseField", prefetch_dict: Dict, orders_by: Dict - ) -> None: - """ - Updates models that are already loaded, usually children of children. 
- - :param target_field: ormar field with relation definition - :type target_field: "BaseField" - :param prefetch_dict: dictionaries of related models to prefetch - :type prefetch_dict: Dict - :param orders_by: dictionary of order by clauses by model - :type orders_by: Dict - """ - target_model = target_field.to - for instance in self.models.get(target_model.get_name(), []): - self._populate_nested_related( - model=instance, prefetch_dict=prefetch_dict, orders_by=orders_by - ) - - def _populate_rows( # noqa: CFQ002 - self, - rows: List, - target_field: "ForeignKeyField", - parent_model: Type["Model"], - table_prefix: str, - exclude_prefix: str, - excludable: "ExcludableItems", prefetch_dict: Dict, - orders_by: Dict, + parent: Node, + model: Type["Model"], ) -> None: """ - Instantiates children models extracted from given relation. + Build a tree of already loaded nodes and nodes that need + to be loaded through the prefetch query. - Populates them with their own nested children if they are included in prefetch - query. - - Sets the initialized models and ids of them under corresponding keys in - already_extracted dictionary. Later those instances will be fetched by ids - and set on the parent model after sorting if needed. 
- - :param excludable: structure of fields to include and exclude - :type excludable: ExcludableItems - :param rows: raw sql response from the prefetch query - :type rows: List[sqlalchemy.engine.result.RowProxy] - :param target_field: field with relation definition from parent model - :type target_field: "BaseField" - :param parent_model: model with relation definition - :type parent_model: Type[Model] - :param table_prefix: prefix of the target table from current relation - :type table_prefix: str - :param prefetch_dict: dictionaries of related models to prefetch + :param select_dict: dictionary with select query structure + :type select_dict: Dict + :param prefetch_dict: dictionary with prefetch query structure :type prefetch_dict: Dict - :param orders_by: dictionary of order by clauses by model - :type orders_by: Dict + :param parent: parent Node + :type parent: Node + :param model: currently processed model + :type model: Type[Model] """ - target_model = target_field.to - for row in rows: - field_name = parent_model.get_related_field_name(target_field=target_field) - item = target_model.extract_prefixed_table_columns( - item={}, row=row, table_prefix=table_prefix, excludable=excludable + for related in prefetch_dict.keys(): + relation_field = cast( + "ForeignKeyField", model.ormar_config.model_fields[related] ) - item["__excluded__"] = target_model.get_names_to_exclude( - excludable=excludable, alias=exclude_prefix - ) - instance = target_model(**item) - instance = self._populate_nested_related( - model=instance, prefetch_dict=prefetch_dict, orders_by=orders_by - ) - field_db_name = target_model.get_column_alias(field_name) - models = self.already_extracted[target_model.get_name()].setdefault( - "pk_models", {} - ) - if instance.pk not in models: - models[instance.pk] = instance - self.already_extracted[target_model.get_name()].setdefault( - field_name, dict() - ).setdefault(row[field_db_name], set()).add(instance.pk) + if related in select_dict: + task: Node =
AlreadyLoadedNode( + relation_field=relation_field, parent=parent + ) + else: + task = LoadNode( + relation_field=relation_field, + excludable=self.excludable, + orders_by=self.orders_by, + parent=parent, + source_model=self.model, + ) + if prefetch_dict: + self._build_load_tree( + select_dict=select_dict.get(related, {}), + prefetch_dict=prefetch_dict.get(related, {}), + parent=task, + model=model.ormar_config.model_fields[related].to, + ) diff --git a/ormar/queryset/queryset.py b/ormar/queryset/queryset.py index 0da47f5..91f8cbc 100644 --- a/ormar/queryset/queryset.py +++ b/ormar/queryset/queryset.py @@ -172,7 +172,7 @@ class QuerySet(Generic[T]): select_related=self._select_related, orders_by=self.order_bys, ) - return await query.prefetch_related(models=models, rows=rows) # type: ignore + return await query.prefetch_related(models=models) # type: ignore async def _process_query_result_rows(self, rows: List) -> List["T"]: """ diff --git a/ormar/queryset/utils.py b/ormar/queryset/utils.py index 97122d8..234e0cc 100644 --- a/ormar/queryset/utils.py +++ b/ormar/queryset/utils.py @@ -6,7 +6,6 @@ from typing import ( Dict, List, Optional, - Sequence, Set, Tuple, Type, @@ -42,7 +41,7 @@ def check_node_not_dict_or_not_last_node( def translate_list_to_dict( # noqa: CCR001 - list_to_trans: Union[List, Set], is_order: bool = False + list_to_trans: Union[List, Set], default: Any = ... ) -> Dict: """ Splits the list of strings by '__' and converts them to dictionary with nested @@ -53,6 +52,8 @@ def translate_list_to_dict( # noqa: CCR001 :param list_to_trans: input list :type list_to_trans: Union[List, Set] + :param default: value to use as a default value + :type default: Any :param is_order: flag if change affects order_by clauses are they require special default value with sort order. :type is_order: bool @@ -63,14 +64,7 @@ def translate_list_to_dict( # noqa: CCR001 for path in list_to_trans: current_level = new_dict parts = path.split("__") - def_val: Any = ... 
- if is_order: - if parts[0][0] == "-": - def_val = "desc" - parts[0] = parts[0][1:] - else: - def_val = "asc" - + def_val: Any = default for ind, part in enumerate(parts): is_last = ind == len(parts) - 1 if check_node_not_dict_or_not_last_node( @@ -189,78 +183,6 @@ def update_dict_from_list(curr_dict: Dict, list_to_update: Union[List, Set]) -> return updated_dict -def extract_nested_models( # noqa: CCR001 - model: "Model", model_type: Type["Model"], select_dict: Dict, extracted: Dict -) -> None: - """ - Iterates over model relations and extracts all nested models from select_dict and - puts them in corresponding list under relation name in extracted dict.keys - - Basically flattens all relation to dictionary of all related models, that can be - used on several models and extract all of their children into dictionary of lists - witch children models. - - Goes also into nested relations if needed (specified in select_dict). - - :param model: parent Model - :type model: Model - :param model_type: parent model class - :type model_type: Type[Model] - :param select_dict: dictionary of related models from select_related - :type select_dict: Dict - :param extracted: dictionary with already extracted models - :type extracted: Dict - """ - follow = [rel for rel in model_type.extract_related_names() if rel in select_dict] - for related in follow: - child = getattr(model, related) - if not child: - continue - target_model = model_type.ormar_config.model_fields[related].to - if isinstance(child, list): - extracted.setdefault(target_model.get_name(), []).extend(child) - if select_dict[related] is not Ellipsis: - for sub_child in child: - extract_nested_models( - sub_child, target_model, select_dict[related], extracted - ) - else: - extracted.setdefault(target_model.get_name(), []).append(child) - if select_dict[related] is not Ellipsis: - extract_nested_models( - child, target_model, select_dict[related], extracted - ) - - -def extract_models_to_dict_of_lists( - model_type: 
Type["Model"], - models: Sequence["Model"], - select_dict: Dict, - extracted: Optional[Dict] = None, -) -> Dict: - """ - Receives a list of models and extracts all of the children and their children - into dictionary of lists with children models, flattening the structure to one dict - with all children models under their relation keys. - - :param model_type: parent model class - :type model_type: Type[Model] - :param models: list of models from which related models should be extracted. - :type models: List[Model] - :param select_dict: dictionary of related models from select_related - :type select_dict: Dict - :param extracted: dictionary with already extracted models - :type extracted: Dict - :return: dictionary of lists f related models - :rtype: Dict - """ - if not extracted: - extracted = dict() - for model in models: - extract_nested_models(model, model_type, select_dict, extracted) - return extracted - - def get_relationship_alias_model_and_str( source_model: Type["Model"], related_parts: List ) -> Tuple[str, Type["Model"], str, bool]: diff --git a/poetry.lock b/poetry.lock index 8cf7e9f..76b66e3 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,9 +1,10 @@ -# This file is automatically @generated by Poetry 1.7.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.4.1 and should not be changed by hand. [[package]] name = "aiomysql" version = "0.2.0" description = "MySQL driver for asyncio." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -22,6 +23,7 @@ sa = ["sqlalchemy (>=1.3,<1.4)"] name = "aiopg" version = "1.4.0" description = "Postgres integration with asyncio." 
+category = "main" optional = true python-versions = ">=3.7" files = [ @@ -40,6 +42,7 @@ sa = ["sqlalchemy[postgresql-psycopg2binary] (>=1.3,<1.5)"] name = "aiosqlite" version = "0.19.0" description = "asyncio bridge to the standard sqlite3 module" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -55,6 +58,7 @@ docs = ["sphinx (==6.1.3)", "sphinx-mdinclude (==0.5.3)"] name = "annotated-types" version = "0.6.0" description = "Reusable constraint types to use with typing.Annotated" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -69,6 +73,7 @@ typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.9\""} name = "anyio" version = "4.3.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -91,6 +96,7 @@ trio = ["trio (>=0.23)"] name = "asgi-lifespan" version = "2.1.0" description = "Programmatic startup/shutdown of ASGI apps." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -105,6 +111,7 @@ sniffio = "*" name = "astunparse" version = "1.6.3" description = "An AST unparser for Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -120,6 +127,7 @@ wheel = ">=0.23.0,<1.0" name = "async-timeout" version = "4.0.3" description = "Timeout context manager for asyncio programs" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -131,6 +139,7 @@ files = [ name = "asyncpg" version = "0.28.0" description = "An asyncio PostgreSQL driver" +category = "main" optional = true python-versions = ">=3.7.0" files = [ @@ -184,6 +193,7 @@ test = ["flake8 (>=5.0,<6.0)", "uvloop (>=0.15.3)"] name = "babel" version = "2.14.0" description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -201,6 +211,7 @@ dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] name = "black" version = "24.3.0" description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -247,6 +258,7 @@ uvloop = ["uvloop (>=0.15.2)"] name = "certifi" version = "2024.2.2" description = "Python package for providing Mozilla's CA Bundle." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -258,6 +270,7 @@ files = [ name = "cffi" version = "1.16.0" description = "Foreign Function Interface for Python calling C code." +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -322,6 +335,7 @@ pycparser = "*" name = "cfgv" version = "3.4.0" description = "Validate configuration and produce human readable error messages." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -333,6 +347,7 @@ files = [ name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "dev" optional = false python-versions = ">=3.7.0" files = [ @@ -432,6 +447,7 @@ files = [ name = "click" version = "8.1.7" description = "Composable command line interface toolkit" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -446,6 +462,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} name = "codecov" version = "2.1.13" description = "Hosted coverage reports for GitHub, Bitbucket and Gitlab" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -461,6 +478,7 @@ requests = ">=2.7.9" name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ @@ -472,6 +490,7 @@ files = [ name = "coverage" version = "7.4.4" description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -539,6 +558,7 @@ toml = ["tomli"] name = "cryptography" version = "42.0.5" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -593,6 +613,7 @@ test-randomorder = ["pytest-randomly"] name = "databases" version = "0.7.0" description = "Async database support for Python." 
+category = "main" optional = false python-versions = ">=3.7" files = [ @@ -617,6 +638,7 @@ sqlite = ["aiosqlite"] name = "dataclasses" version = "0.6" description = "A backport of the dataclasses module for Python 3.6" +category = "dev" optional = false python-versions = "*" files = [ @@ -628,6 +650,7 @@ files = [ name = "distlib" version = "0.3.8" description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" files = [ @@ -639,6 +662,7 @@ files = [ name = "exceptiongroup" version = "1.2.0" description = "Backport of PEP 654 (exception groups)" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -649,10 +673,27 @@ files = [ [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "faker" +version = "24.3.0" +description = "Faker is a Python package that generates fake data for you." +category = "dev" +optional = false +python-versions = ">=3.8" +files = [ + {file = "Faker-24.3.0-py3-none-any.whl", hash = "sha256:9978025e765ba79f8bf6154c9630a9c2b7f9c9b0f175d4ad5e04b19a82a8d8d6"}, + {file = "Faker-24.3.0.tar.gz", hash = "sha256:5fb5aa9749d09971e04a41281ae3ceda9414f683d4810a694f8a8eebb8f9edec"}, +] + +[package.dependencies] +python-dateutil = ">=2.4" +typing-extensions = {version = ">=3.10.0.1", markers = "python_version <= \"3.8\""} + [[package]] name = "fastapi" version = "0.109.2" description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -672,6 +713,7 @@ all = ["email-validator (>=2.0.0)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)" name = "filelock" version = "3.13.1" description = "A platform independent file lock." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -688,6 +730,7 @@ typing = ["typing-extensions (>=4.8)"] name = "ghp-import" version = "2.1.0" description = "Copy your docs directly to the gh-pages branch." 
+category = "dev" optional = false python-versions = "*" files = [ @@ -705,6 +748,7 @@ dev = ["flake8", "markdown", "twine", "wheel"] name = "greenlet" version = "3.0.3" description = "Lightweight in-process concurrent programming" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -776,6 +820,7 @@ test = ["objgraph", "psutil"] name = "griffe" version = "0.42.1" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -791,6 +836,7 @@ colorama = ">=0.4" name = "h11" version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -802,6 +848,7 @@ files = [ name = "httpcore" version = "0.17.3" description = "A minimal low-level HTTP client." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -813,16 +860,17 @@ files = [ anyio = ">=3.0,<5.0" certifi = "*" h11 = ">=0.13,<0.15" -sniffio = "==1.*" +sniffio = ">=1.0.0,<2.0.0" [package.extras] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "httpx" version = "0.24.1" description = "The next generation HTTP client." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -838,14 +886,15 @@ sniffio = "*" [package.extras] brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<14)"] http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "identify" version = "2.5.35" description = "File identification library for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -860,6 +909,7 @@ license = ["ukkonen"] name = "idna" version = "3.6" description = "Internationalized Domain Names in Applications (IDNA)" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -871,6 +921,7 @@ files = [ name = "importlib-metadata" version = "7.1.0" description = "Read metadata from Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -890,6 +941,7 @@ testing = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", name = "importlib-resources" version = "6.4.0" description = "Read resources from Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -908,6 +960,7 @@ testing = ["jaraco.test (>=5.4)", "pytest (>=6)", "pytest-checkdocs (>=2.4)", "p name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -919,6 +972,7 @@ files = [ name = "jinja2" version = "3.1.3" description = "A very fast and expressive template engine." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -936,6 +990,7 @@ i18n = ["Babel (>=2.7)"] name = "markdown" version = "3.6" description = "Python implementation of John Gruber's Markdown." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -954,6 +1009,7 @@ testing = ["coverage", "pyyaml"] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1023,6 +1079,7 @@ files = [ name = "mergedeep" version = "1.3.4" description = "A deep merge function for 🐍." +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1034,6 +1091,7 @@ files = [ name = "mike" version = "2.0.0" description = "Manage multiple versions of your MkDocs-powered documentation" +category = "dev" optional = false python-versions = "*" files = [ @@ -1058,6 +1116,7 @@ test = ["coverage", "flake8 (>=3.0)", "flake8-quotes", "shtab"] name = "mkdocs" version = "1.5.3" description = "Project documentation with Markdown." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1089,6 +1148,7 @@ min-versions = ["babel (==2.9.0)", "click (==7.0)", "colorama (==0.4)", "ghp-imp name = "mkdocs-autorefs" version = "1.0.1" description = "Automatically link across pages in MkDocs." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1105,6 +1165,7 @@ mkdocs = ">=1.1" name = "mkdocs-gen-files" version = "0.5.0" description = "MkDocs plugin to programmatically generate documentation pages during the build" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1119,6 +1180,7 @@ mkdocs = ">=1.0.3" name = "mkdocs-literate-nav" version = "0.6.1" description = "MkDocs plugin to specify the navigation in Markdown instead of YAML" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1133,6 +1195,7 @@ mkdocs = ">=1.0.3" name = "mkdocs-material" version = "9.2.8" description = "Documentation that simply works" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1157,6 +1220,7 @@ requests = ">=2.31,<3.0" name = "mkdocs-material-extensions" version = "1.3.1" description = "Extension pack for Python Markdown and MkDocs Material." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1168,6 +1232,7 @@ files = [ name = "mkdocs-section-index" version = "0.3.8" description = "MkDocs plugin to allow clickable sections that lead to an index page" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1182,6 +1247,7 @@ mkdocs = ">=1.2" name = "mkdocstrings" version = "0.22.0" description = "Automatic documentation from sources, for MkDocs." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1209,6 +1275,7 @@ python-legacy = ["mkdocstrings-python-legacy (>=0.2.1)"] name = "mkdocstrings-python" version = "1.8.0" description = "A Python handler for mkdocstrings." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1224,6 +1291,7 @@ mkdocstrings = ">=0.20" name = "mypy" version = "1.9.0" description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1271,6 +1339,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1282,6 +1351,7 @@ files = [ name = "mysqlclient" version = "2.2.4" description = "Python interface to MySQL" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1300,6 +1370,7 @@ files = [ name = "nest-asyncio" version = "1.6.0" description = "Patch asyncio to allow nested event loops" +category = "dev" optional = false python-versions = ">=3.5" files = [ @@ -1311,6 +1382,7 @@ files = [ name = "nodeenv" version = "1.8.0" description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" files = [ @@ -1325,6 +1397,7 @@ setuptools = "*" name = "orjson" version = "3.9.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" +category = "main" optional = true python-versions = ">=3.8" files = [ @@ -1384,6 +1457,7 @@ files = [ name = "packaging" version = "24.0" description = "Core utilities for Python packages" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1395,6 +1469,7 @@ files = [ name = "paginate" version = "0.5.6" description = "Divides large result sets into pages for easier browsing" +category = "dev" optional = false python-versions = "*" files = [ @@ -1405,6 +1480,7 @@ files = [ name = "pathspec" version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1416,6 +1492,7 @@ files = [ name = "platformdirs" version = "4.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1431,6 +1508,7 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4.3)", "pytest- name = "pluggy" version = "1.4.0" description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1446,6 +1524,7 @@ testing = ["pytest", "pytest-benchmark"] name = "pre-commit" version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1464,6 +1543,7 @@ virtualenv = ">=20.10.0" name = "psycopg2-binary" version = "2.9.9" description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1545,6 +1625,7 @@ files = [ name = "py-cpuinfo" version = "9.0.0" description = "Get CPU info with pure Python" +category = "dev" optional = false python-versions = "*" files = [ @@ -1556,6 +1637,7 @@ files = [ name = "pycparser" version = "2.21" description = "C parser in Python" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ @@ -1567,6 +1649,7 @@ files = [ name = "pydantic" version = "2.5.3" description = "Data validation using Python type hints" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1586,6 +1669,7 @@ email = ["email-validator (>=2.0.0)"] name = "pydantic-core" version = "2.14.6" description = "" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1703,6 +1787,7 @@ typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" name = "pydantic-extra-types" version = "2.6.0" description = 
"Extra Pydantic types." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1720,6 +1805,7 @@ all = ["pendulum (>=3.0.0,<4.0.0)", "phonenumbers (>=8,<9)", "pycountry (>=23)", name = "pygments" version = "2.17.2" description = "Pygments is a syntax highlighting package written in Python." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1735,6 +1821,7 @@ windows-terminal = ["colorama (>=0.4.6)"] name = "pymdown-extensions" version = "10.7.1" description = "Extension pack for Python Markdown." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -1753,6 +1840,7 @@ extra = ["pygments (>=2.12)"] name = "pymysql" version = "1.1.0" description = "Pure Python MySQL Driver" +category = "main" optional = true python-versions = ">=3.7" files = [ @@ -1768,6 +1856,7 @@ rsa = ["cryptography"] name = "pyparsing" version = "3.1.2" description = "pyparsing module - Classes and methods to define and execute parsing grammars" +category = "dev" optional = false python-versions = ">=3.6.8" files = [ @@ -1782,6 +1871,7 @@ diagrams = ["jinja2", "railroad-diagrams"] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1804,6 +1894,7 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no name = "pytest-asyncio" version = "0.21.1" description = "Pytest support for asyncio" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1822,6 +1913,7 @@ testing = ["coverage (>=6.2)", "flaky (>=3.5.0)", "hypothesis (>=5.7.1)", "mypy name = "pytest-benchmark" version = "4.0.0" description = "A ``pytest`` fixture for benchmarking code. It will group the tests into rounds that are calibrated to the chosen timer." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1842,6 +1934,7 @@ histogram = ["pygal", "pygaljs"] name = "pytest-codspeed" version = "2.2.1" description = "Pytest plugin to create CodSpeed benchmarks" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1864,6 +1957,7 @@ test = ["pytest (>=7.0,<8.0)", "pytest-cov (>=4.0.0,<4.1.0)"] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -1882,6 +1976,7 @@ testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtuale name = "python-dateutil" version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" files = [ @@ -1896,6 +1991,7 @@ six = ">=1.5" name = "pytz" version = "2024.1" description = "World timezone definitions, modern and historical" +category = "dev" optional = false python-versions = "*" files = [ @@ -1907,6 +2003,7 @@ files = [ name = "pyyaml" version = "6.0.1" description = "YAML parser and emitter for Python" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1915,7 +2012,6 @@ files = [ {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, - {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = 
"sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, @@ -1923,16 +2019,8 @@ files = [ {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, - {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, - {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a08c6f0fe150303c1c6b71ebcd7213c2858041a7e01975da3a99aed1e7a378ef"}, - {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, - {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = 
"sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, - {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, - {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, @@ -1949,7 +2037,6 @@ files = [ {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, - {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, @@ -1957,7 +2044,6 @@ files = [ {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash 
= "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, - {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, @@ -1967,6 +2053,7 @@ files = [ name = "pyyaml-env-tag" version = "0.1" description = "A custom YAML tag for referencing environment variables in YAML files. " +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1981,6 +2068,7 @@ pyyaml = "*" name = "regex" version = "2023.12.25" description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2083,6 +2171,7 @@ files = [ name = "requests" version = "2.31.0" description = "Python HTTP for Humans." +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2104,6 +2193,7 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] name = "ruff" version = "0.0.275" description = "An extremely fast Python linter, written in Rust." 
+category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2130,6 +2220,7 @@ files = [ name = "setuptools" version = "69.2.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2146,6 +2237,7 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" files = [ @@ -2157,6 +2249,7 @@ files = [ name = "sniffio" version = "1.3.1" description = "Sniff out which async library your code is running under" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2168,6 +2261,7 @@ files = [ name = "sqlalchemy" version = "1.4.52" description = "Database Abstraction Library" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ @@ -2220,7 +2314,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} +greenlet = {version = "!=0.4.17", markers = "python_version >= \"3\" and platform_machine == \"aarch64\" or python_version >= \"3\" and platform_machine == \"ppc64le\" or python_version >= \"3\" and platform_machine == \"x86_64\" or python_version >= \"3\" and platform_machine == \"amd64\" or python_version >= \"3\" and platform_machine == \"AMD64\" or python_version >= \"3\" and platform_machine == \"win32\" or python_version >= \"3\" and platform_machine == \"WIN32\""} [package.extras] aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] @@ -2247,6 +2341,7 @@ sqlcipher = 
["sqlcipher3_binary"] name = "starlette" version = "0.36.3" description = "The little ASGI library that shines." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2265,6 +2360,7 @@ full = ["httpx (>=0.22.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.7 name = "tomli" version = "2.0.1" description = "A lil' TOML parser" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2276,6 +2372,7 @@ files = [ name = "types-aiofiles" version = "23.2.0.20240311" description = "Typing stubs for aiofiles" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2287,6 +2384,7 @@ files = [ name = "types-cryptography" version = "3.3.23.2" description = "Typing stubs for cryptography" +category = "dev" optional = false python-versions = "*" files = [ @@ -2298,6 +2396,7 @@ files = [ name = "types-enum34" version = "1.1.8" description = "Typing stubs for enum34" +category = "dev" optional = false python-versions = "*" files = [ @@ -2309,6 +2408,7 @@ files = [ name = "types-ipaddress" version = "1.0.8" description = "Typing stubs for ipaddress" +category = "dev" optional = false python-versions = "*" files = [ @@ -2320,6 +2420,7 @@ files = [ name = "types-orjson" version = "3.6.2" description = "Typing stubs for orjson" +category = "dev" optional = false python-versions = "*" files = [ @@ -2331,6 +2432,7 @@ files = [ name = "types-pkg-resources" version = "0.1.3" description = "Typing stubs for pkg_resources" +category = "dev" optional = false python-versions = "*" files = [ @@ -2342,6 +2444,7 @@ files = [ name = "types-pymysql" version = "1.1.0.1" description = "Typing stubs for PyMySQL" +category = "dev" optional = false python-versions = "*" files = [ @@ -2353,6 +2456,7 @@ files = [ name = "types-requests" version = "2.31.0.20240311" description = "Typing stubs for requests" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2367,6 +2471,7 @@ urllib3 = ">=2" name = "types-toml" version 
= "0.10.8.20240310" description = "Typing stubs for toml" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2378,6 +2483,7 @@ files = [ name = "types-ujson" version = "5.9.0.0" description = "Typing stubs for ujson" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2389,6 +2495,7 @@ files = [ name = "typing-extensions" version = "4.10.0" description = "Backported and Experimental Type Hints for Python 3.8+" +category = "main" optional = false python-versions = ">=3.8" files = [ @@ -2400,6 +2507,7 @@ files = [ name = "urllib3" version = "2.2.1" description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2417,6 +2525,7 @@ zstd = ["zstandard (>=0.18.0)"] name = "verspec" version = "0.1.0" description = "Flexible version handling" +category = "dev" optional = false python-versions = "*" files = [ @@ -2431,6 +2540,7 @@ test = ["coverage", "flake8 (>=3.7)", "mypy", "pretend", "pytest"] name = "virtualenv" version = "20.25.1" description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2451,6 +2561,7 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess name = "watchdog" version = "3.0.0" description = "Filesystem events monitoring" +category = "dev" optional = false python-versions = ">=3.7" files = [ @@ -2490,6 +2601,7 @@ watchmedo = ["PyYAML (>=3.10)"] name = "wheel" version = "0.43.0" description = "A built-package format for Python" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2504,6 +2616,7 @@ test = ["pytest (>=6.0.0)", "setuptools (>=65)"] name = "yappi" version = "1.6.0" description = "Yet Another Python Profiler" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -2566,6 +2679,7 @@ test = ["gevent (>=20.6.2)"] name = "zipp" version = "3.18.1" description = "Backport 
of pathlib-compatible object wrapper for zip files" +category = "dev" optional = false python-versions = ">=3.8" files = [ @@ -2590,4 +2704,4 @@ sqlite = ["aiosqlite"] [metadata] lock-version = "2.0" python-versions = "^3.8.0" -content-hash = "57525b944571556e307f2795f2d74c63017b4e3fbc10a82f30f323f91f8163e8" +content-hash = "69ac3f442f88e777aeb77154e45fdd3d000cf3eedcfaca1d6b82e2fd568ceb44" diff --git a/pyproject.toml b/pyproject.toml index f345716..b958633 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -128,6 +128,7 @@ pydantic-extra-types = "^2.5.0" watchdog = "<4.0.0" pytest-codspeed = "^2.2.0" mike = "^2.0.0" +faker = "^24.3.0" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/tests/test_relations/test_prefetch_related_with_same_models.py b/tests/test_relations/test_prefetch_related_with_same_models.py new file mode 100644 index 0000000..259163b --- /dev/null +++ b/tests/test_relations/test_prefetch_related_with_same_models.py @@ -0,0 +1,119 @@ +from random import randint +from typing import ForwardRef, Optional + +import ormar +import pytest +from faker import Faker +from ormar.relations.relation_proxy import RelationProxy + +from tests.lifespan import init_tests +from tests.settings import create_config + +base_ormar_config = create_config() +fake = Faker() + + +class Author(ormar.Model): + ormar_config = base_ormar_config.copy(tablename="authors") + + id: int = ormar.Integer(primary_key=True) + name: str = ormar.String(max_length=256) + + +class BookAuthor(ormar.Model): + ormar_config = base_ormar_config.copy(tablename="book_authors") + + id: int = ormar.Integer(primary_key=True) + + +class BookCoAuthor(ormar.Model): + ormar_config = base_ormar_config.copy(tablename="book_co_authors") + + id: int = ormar.Integer(primary_key=True) + + +class Book(ormar.Model): + ormar_config = base_ormar_config.copy(tablename="books") + + id: int = ormar.Integer(primary_key=True) + name: str = ormar.String(max_length=256) + description: Optional[str] = 
ormar.String(max_length=256, nullable=True) + authors: RelationProxy[Author] = ormar.ManyToMany( + Author, related_name="author_books", through=BookAuthor + ) + co_authors: RelationProxy[Author] = ormar.ManyToMany( + Author, related_name="co_author_books", through=BookCoAuthor + ) + + +class SelfRef(ormar.Model): + ormar_config = base_ormar_config.copy(tablename="selfrefs") + + id: int = ormar.Integer(primary_key=True) + name: str = ormar.String(max_length=100) + main_child = ormar.ForeignKey(to=ForwardRef("SelfRef"), related_name="parent") + children: RelationProxy["SelfRef"] = ormar.ManyToMany(ForwardRef("SelfRef")) + + +SelfRef.update_forward_refs() + +create_test_database = init_tests(base_ormar_config) + + +@pytest.mark.asyncio +async def test_prefetch_related_with_same_model_relations() -> None: + async with base_ormar_config.database: + for _ in range(6): + await Author.objects.create(name=fake.name()) + + book = await Book.objects.create(name=fake.sentence(nb_words=randint(1, 4))) + for i in range(1, 3): + await book.authors.add(await Author.objects.get(id=i)) + for i in range(3, 6): + await book.co_authors.add(await Author.objects.get(id=i)) + + prefetch_result = await Book.objects.prefetch_related( + ["authors", "co_authors"] + ).all() + prefetch_dict_result = [x.dict() for x in prefetch_result if x.id == 1][0] + select_result = await Book.objects.select_related( + ["authors", "co_authors"] + ).all() + select_dict_result = [ + x.dict( + exclude={ + "authors": {"bookauthor": ...}, + "co_authors": {"bookcoauthor": ...}, + } + ) + for x in select_result + if x.id == 1 + ][0] + assert prefetch_dict_result == select_dict_result + + +@pytest.mark.asyncio +async def test_prefetch_related_with_self_referencing() -> None: + async with base_ormar_config.database: + main_child = await SelfRef.objects.create(name="MainChild") + main = await SelfRef.objects.create(name="Main", main_child=main_child) + + child1 = await SelfRef.objects.create(name="Child1") + child2 = 
await SelfRef.objects.create(name="Child2") + + await main.children.add(child1) + await main.children.add(child2) + + select_result = await SelfRef.objects.select_related( + ["main_child", "children"] + ).get(name="Main") + print(select_result.json(indent=4)) + + prefetch_result = await SelfRef.objects.prefetch_related( + ["main_child", "children"] + ).get(name="Main") + + assert prefetch_result.main_child.name == main_child.name + assert len(prefetch_result.children) == 2 + assert prefetch_result.children[0].name == child1.name + assert prefetch_result.children[1].name == child2.name diff --git a/tests/test_utils/test_queryset_utils.py b/tests/test_utils/test_queryset_utils.py index 8c6320b..5683326 100644 --- a/tests/test_utils/test_queryset_utils.py +++ b/tests/test_utils/test_queryset_utils.py @@ -1,5 +1,3 @@ -import ormar -from ormar.queryset.queries.prefetch_query import sort_models from ormar.queryset.utils import ( subtract_dict, translate_list_to_dict, @@ -7,7 +5,6 @@ from ormar.queryset.utils import ( update_dict_from_list, ) -from tests.lifespan import init_tests from tests.settings import create_config base_ormar_config = create_config() @@ -172,39 +169,3 @@ def test_subtracting_with_set_and_dict(): } test = subtract_dict(curr_dict, dict_to_update) assert test == {"translation": {"translations": {"language": Ellipsis}}} - - -class SortModel(ormar.Model): - ormar_config = base_ormar_config.copy(tablename="sorts") - - id: int = ormar.Integer(primary_key=True) - name: str = ormar.String(max_length=100) - sort_order: int = ormar.Integer() - - -def test_sorting_models(): - models = [ - SortModel(id=1, name="Alice", sort_order=0), - SortModel(id=2, name="Al", sort_order=1), - SortModel(id=3, name="Zake", sort_order=1), - SortModel(id=4, name="Will", sort_order=0), - SortModel(id=5, name="Al", sort_order=2), - SortModel(id=6, name="Alice", sort_order=2), - ] - orders_by = {"name": "asc", "none": {}, "sort_order": "desc"} - models = sort_models(models, 
orders_by) - assert models[5].name == "Zake" - assert models[0].name == "Al" - assert models[1].name == "Al" - assert [model.id for model in models] == [5, 2, 6, 1, 4, 3] - - orders_by = {"name": "asc", "none": set("aa"), "id": "asc"} - models = sort_models(models, orders_by) - assert [model.id for model in models] == [2, 5, 1, 6, 4, 3] - - orders_by = {"sort_order": "asc", "none": ..., "id": "asc", "uu": 2, "aa": None} - models = sort_models(models, orders_by) - assert [model.id for model in models] == [1, 4, 2, 3, 5, 6] - - -create_test_database = init_tests(base_ormar_config)