improve date handling
@@ -108,14 +108,18 @@ You can use either `length` and `precision` parameters or `max_digits` and `deci
 ### Time
 
-`Time()` has no required parameters.
+`Time(timezone: bool = False)` has no required parameters.
+
+You can pass `timezone=True` for timezone aware database column.
 
 * Sqlalchemy column: `sqlalchemy.Time`
 * Type (used for pydantic): `datetime.time`
 
 ### DateTime
 
-`DateTime()` has no required parameters.
+`DateTime(timezone: bool = False)` has no required parameters.
+
+You can pass `timezone=True` for timezone aware database column.
 
 * Sqlalchemy column: `sqlalchemy.DateTime`
 * Type (used for pydantic): `datetime.datetime`
 
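The documentation hunk above introduces the new `timezone` flag on `Time` and `DateTime`. As a rough illustration (the `Event` model and its field names below are made up for this note, not taken from the commit), a model using the flag might look like this:

```python
import datetime

import databases
import sqlalchemy

import ormar

database = databases.Database("sqlite:///test.db")
metadata = sqlalchemy.MetaData()


class Event(ormar.Model):
    class Meta:
        database = database
        metadata = metadata

    id: int = ormar.Integer(primary_key=True)
    # timezone=True maps to timezone-aware sqlalchemy.DateTime / sqlalchemy.Time columns
    starts_at: datetime.datetime = ormar.DateTime(timezone=True)
    doors_open: datetime.time = ormar.Time(timezone=True)
```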
@@ -44,6 +44,22 @@ pip install ormar[sqlite]
 
 Will install also `aiosqlite`.
 
+### Orjson
+
+```py
+pip install ormar[orjson]
+```
+
+Will install also `orjson` that is much faster than builtin json parser.
+
+### Crypto
+
+```py
+pip install ormar[crypto]
+```
+
+Will install also `cryptography` that is required to work with encrypted columns.
+
 ### Manual installation of dependencies
 
 Of course, you can also install these requirements manually with `pip install asyncpg` etc.
@@ -1,3 +1,15 @@
+# 0.10.14
+
+## ✨ Features
+
+* Allow passing `timezone:bool = False` parameter to `DateTime` and `Time` fields for timezone aware database columns [#264](https://github.com/collerek/ormar/issues/264)
+* Allow passing datetime, date and time for filter on `DateTime`, `Time` and `Date` fields to allow filtering by datetimes instead of converting the value to string [#79](https://github.com/collerek/ormar/issues/79)
+
+## 🐛 Fixes
+
+* Fix dependencies from `psycopg2` to `psycopg2-binary` [#255](https://github.com/collerek/ormar/issues/255)
+
+
 # 0.10.13
 
 ## ✨ Features
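The second feature entry means filter values no longer have to be pre-converted to strings. A minimal sketch, reusing the illustrative `Event` model from the earlier note (so the names here are assumptions, not part of the release):

```python
import datetime


async def events_before_june() -> list:
    # A datetime object can be passed straight into the filter;
    # ormar converts it to an ISO string for the database backend.
    cutoff = datetime.datetime(2021, 6, 1, tzinfo=datetime.timezone.utc)
    return await Event.objects.filter(starts_at__lt=cutoff).all()
```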
@@ -76,7 +76,7 @@ class UndefinedType:  # pragma no cover
 
 Undefined = UndefinedType()
 
-__version__ = "0.10.13"
+__version__ = "0.10.14"
 __all__ = [
     "Integer",
     "BigInteger",
@@ -351,6 +351,19 @@ class DateTime(ModelFieldFactory, datetime.datetime):
     _type = datetime.datetime
     _sample = "datetime"
 
+    def __new__(  # type: ignore # noqa CFQ002
+        cls, *, timezone: bool = False, **kwargs: Any
+    ) -> BaseField:  # type: ignore
+        kwargs = {
+            **kwargs,
+            **{
+                k: v
+                for k, v in locals().items()
+                if k not in ["cls", "__class__", "kwargs"]
+            },
+        }
+        return super().__new__(cls, **kwargs)
+
     @classmethod
     def get_column_type(cls, **kwargs: Any) -> Any:
         """
@@ -362,7 +375,7 @@ class DateTime(ModelFieldFactory, datetime.datetime):
         :return: initialized column with proper options
         :rtype: sqlalchemy Column
         """
-        return sqlalchemy.DateTime()
+        return sqlalchemy.DateTime(timezone=kwargs.get("timezone", False))
 
 
 class Date(ModelFieldFactory, datetime.date):
@@ -395,6 +408,19 @@ class Time(ModelFieldFactory, datetime.time):
     _type = datetime.time
     _sample = "time"
 
+    def __new__(  # type: ignore # noqa CFQ002
+        cls, *, timezone: bool = False, **kwargs: Any
+    ) -> BaseField:  # type: ignore
+        kwargs = {
+            **kwargs,
+            **{
+                k: v
+                for k, v in locals().items()
+                if k not in ["cls", "__class__", "kwargs"]
+            },
+        }
+        return super().__new__(cls, **kwargs)
+
     @classmethod
     def get_column_type(cls, **kwargs: Any) -> Any:
         """
@@ -406,7 +432,7 @@ class Time(ModelFieldFactory, datetime.time):
         :return: initialized column with proper options
         :rtype: sqlalchemy Column
         """
-        return sqlalchemy.Time()
+        return sqlalchemy.Time(timezone=kwargs.get("timezone", False))
 
 
 class JSON(ModelFieldFactory, pydantic.Json):
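Both `get_column_type` changes simply forward the `timezone` keyword to SQLAlchemy. A quick sanity check of the resulting column types (a sketch assuming ormar 0.10.14 is installed, not a test from this commit):

```python
import sqlalchemy

import ormar

# The factories now pass the timezone flag through to the SQLAlchemy type.
dt_col = ormar.DateTime.get_column_type(timezone=True)
t_col = ormar.Time.get_column_type(timezone=True)

assert isinstance(dt_col, sqlalchemy.DateTime) and dt_col.timezone is True
assert isinstance(t_col, sqlalchemy.Time) and t_col.timezone is True

# Omitting the flag keeps the previous, timezone-naive behaviour.
assert ormar.DateTime.get_column_type().timezone is False
```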
@@ -1,3 +1,4 @@
+import datetime
 from typing import Any, Dict, TYPE_CHECKING, Type
 
 import sqlalchemy
@@ -138,6 +139,19 @@ class FilterAction(QueryAction):
         if isinstance(self.filter_value, ormar.Model):
             self.filter_value = self.filter_value.pk
 
+        if isinstance(
+            self.filter_value, (datetime.date, datetime.time, datetime.datetime)
+        ):
+            self.filter_value = self.filter_value.isoformat()
+
+        if isinstance(self.filter_value, (list, tuple, set)):
+            self.filter_value = [
+                x.isoformat()
+                if isinstance(x, (datetime.date, datetime.time, datetime.datetime))
+                else x
+                for x in self.filter_value
+            ]
+
         op_attr = FILTER_OPERATORS[self.operator]
         if self.operator == "isnull":
             op_attr = "is_" if self.filter_value else "isnot"
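The filter change above boils down to calling `isoformat()` on scalar and collection filter values before they reach the backend. A standalone sketch of the same transformation, outside ormar:

```python
import datetime

values = [
    datetime.date(2021, 5, 19),
    datetime.time(0, 20, 0),
    datetime.datetime(2021, 5, 19, 12, 30),
    "already-a-string",
]

# Mirror the conversion applied to collection filter values.
converted = [
    v.isoformat()
    if isinstance(v, (datetime.date, datetime.time, datetime.datetime))
    else v
    for v in values
]
print(converted)
# ['2021-05-19', '00:20:00', '2021-05-19T12:30:00', 'already-a-string']
```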
setup.py
@@ -69,7 +69,7 @@ setup(
         "typing_extensions>=3.7,<=3.7.4.3",
     ],
     extras_require={
-        "postgresql": ["asyncpg", "psycopg2"],
+        "postgresql": ["asyncpg", "psycopg2-binary"],
         "mysql": ["aiomysql", "pymysql"],
         "sqlite": ["aiosqlite"],
         "orjson": ["orjson"],
tests/test_model_definition/test_dates_with_timezone.py (new file)
@@ -0,0 +1,118 @@
+from datetime import timezone, timedelta, datetime, date, time
+
+import databases
+import pytest
+import sqlalchemy
+
+import ormar
+
+from tests.settings import DATABASE_URL
+
+database = databases.Database(DATABASE_URL, force_rollback=True)
+metadata = sqlalchemy.MetaData()
+
+
+class DateFieldsModel(ormar.Model):
+    class Meta:
+        database = database
+        metadata = metadata
+
+    id: int = ormar.Integer(primary_key=True)
+    created_date: datetime = ormar.DateTime(
+        default=datetime.now(tz=timezone(timedelta(hours=3))), timezone=True
+    )
+    updated_date: datetime = ormar.DateTime(
+        default=datetime.now(tz=timezone(timedelta(hours=3))),
+        name="modification_date",
+        timezone=True,
+    )
+
+
+class SampleModel(ormar.Model):
+    class Meta:
+        database = database
+        metadata = metadata
+
+    id: int = ormar.Integer(primary_key=True)
+    updated_at: datetime = ormar.DateTime()
+
+
+class TimeModel(ormar.Model):
+    class Meta:
+        database = database
+        metadata = metadata
+
+    id: int = ormar.Integer(primary_key=True)
+    elapsed: time = ormar.Time()
+
+
+class DateModel(ormar.Model):
+    class Meta:
+        database = database
+        metadata = metadata
+
+    id: int = ormar.Integer(primary_key=True)
+    creation_date: date = ormar.Date()
+
+
+@pytest.fixture(autouse=True, scope="module")
+def create_test_database():
+    engine = sqlalchemy.create_engine(DATABASE_URL)
+    metadata.drop_all(engine)
+    metadata.create_all(engine)
+    yield
+    metadata.drop_all(engine)
+
+
+@pytest.mark.asyncio
+async def test_model_crud_with_timezone():
+    async with database:
+        datemodel = await DateFieldsModel().save()
+        assert datemodel.created_date is not None
+        assert datemodel.updated_date is not None
+
+
+@pytest.mark.asyncio
+async def test_query_with_datetime_in_filter():
+    async with database:
+        creation_dt = datetime(2021, 5, 18, 0, 0, 0, 0)
+        sample = await SampleModel.objects.create(updated_at=creation_dt)
+
+        current_dt = datetime(2021, 5, 19, 0, 0, 0, 0)
+        outdated_samples = await SampleModel.objects.filter(
+            updated_at__lt=current_dt
+        ).all()
+
+        assert outdated_samples[0] == sample
+
+
+@pytest.mark.asyncio
+async def test_query_with_date_in_filter():
+    async with database:
+        sample = await TimeModel.objects.create(elapsed=time(0, 20, 20))
+        await TimeModel.objects.create(elapsed=time(0, 12, 0))
+        await TimeModel.objects.create(elapsed=time(0, 19, 55))
+        sample4 = await TimeModel.objects.create(elapsed=time(0, 21, 15))
+
+        threshold = time(0, 20, 0)
+        samples = await TimeModel.objects.filter(TimeModel.elapsed >= threshold).all()
+
+        assert len(samples) == 2
+        assert samples[0] == sample
+        assert samples[1] == sample4
+
+
+@pytest.mark.asyncio
+async def test_query_with_time_in_filter():
+    async with database:
+        await DateModel.objects.create(creation_date=date(2021, 5, 18))
+        sample2 = await DateModel.objects.create(creation_date=date(2021, 5, 19))
+        sample3 = await DateModel.objects.create(creation_date=date(2021, 5, 20))
+
+        outdated_samples = await DateModel.objects.filter(
+            creation_date__in=[date(2021, 5, 19), date(2021, 5, 20)]
+        ).all()
+
+        assert len(outdated_samples) == 2
+        assert outdated_samples[0] == sample2
+        assert outdated_samples[1] == sample3
@@ -185,25 +185,14 @@ def test_combining_groups_together():
     )
 
 
-# @pytest.mark.asyncio
-# async def test_filtering_by_field_access():
-#     async with database:
-#         async with database.transaction(force_rollback=True):
-#             category = await Category(name='Toys').save()
-#             product1 = await Product(name="G.I Joe",
-#                                      rating=4.7,
-#                                      category=category).save()
-#             product2 = await Product(name="My Little Pony",
-#                                      rating=3.8,
-#                                      category=category).save()
-#
-#             check = Product.object.get(Product.name == "My Little Pony")
-#             assert check == product2
+@pytest.mark.asyncio
+async def test_filtering_by_field_access():
+    async with database:
+        async with database.transaction(force_rollback=True):
+            category = await Category(name="Toys").save()
+            product2 = await Product(
+                name="My Little Pony", rating=3.8, category=category
+            ).save()
 
-# TODO: Finish implementation
-# * overload operators and add missing functions that return FilterAction (V)
-# * return OrderAction for desc() and asc() (V)
-# * create filter groups for & and | (and ~ - NOT?) (V)
-# * accept args in all functions that accept filters? or only filter and exclude? (V)
-# all functions: delete, first, get, get_or_none, get_or_create, all, filter, exclude
-# * accept OrderActions in order_by (V)
+            check = await Product.objects.get(Product.name == "My Little Pony")
+            assert check == product2