Skip to content

Commit

Permalink
Merge pull request #1193 from plone/time
Browse files Browse the repository at this point in the history
Being able to use schema.Time
  • Loading branch information
bloodbare authored Jan 23, 2024
2 parents 9511aa3 + 26eda2c commit 41c45f0
Show file tree
Hide file tree
Showing 4 changed files with 63 additions and 0 deletions.
2 changes: 2 additions & 0 deletions CHANGELOG.rst
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,8 @@ CHANGELOG
7.0.1 (unreleased)
------------------

- Feat: Add support for using ``schema.Time`` fields
  [nilbacardit26]
- Feat: Add metadata info to workflows
- Fix: Update workflow vocabulary name
- Feat: Update workflow vocabulary title attribute to use metadata
Expand Down
9 changes: 9 additions & 0 deletions guillotina/schema/_field.py
Original file line number Diff line number Diff line change
Expand Up @@ -241,6 +241,15 @@ class Time(Orderable, Field):
__doc__ = ITime.__doc__
_type = time

def _validate(self, value):
    """Validate *value* as a time, accepting ``"H:MM[:SS[:...]]"`` strings.

    A str value is converted to ``datetime.time`` by splitting on ":"
    and passing the integer parts to the ``time()`` constructor; a
    malformed string (non-numeric parts, out-of-range units such as
    minute 70, or too many parts) raises WrongType.  The converted (or
    original) value is then handed to the base classes, which enforce
    ``_type`` and the Orderable min/max bounds.
    """
    if isinstance(value, str):
        try:
            # int() raises ValueError on non-numeric parts; time()
            # raises ValueError for out-of-range units and TypeError
            # for too many arguments.
            value = time(*(int(unit_time) for unit_time in value.split(":")))
        except (ValueError, TypeError):
            raise WrongType(value, self._type, self.__name__)
    super(Time, self)._validate(value)


@implementer(IChoice, IFromUnicode)
class Choice(Field):
Expand Down
45 changes: 45 additions & 0 deletions guillotina/tests/test_api.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from datetime import datetime
from datetime import time
from guillotina import configure
from guillotina import schema
from guillotina.addons import Addon
Expand Down Expand Up @@ -1244,6 +1245,7 @@ class ITestSchema(Interface):

object_a = schema.Object(IObjectA, required=False)
list_object_a = PatchField(schema.List(value_type=schema.Object(IObjectA), required=False))
time_ = schema.Time(min=time(9, 0, 0), max=time(12, 0, 0), required=False)


@contenttype(type_name="TestSchema", schema=ITestSchema)
Expand Down Expand Up @@ -1319,6 +1321,49 @@ async def test_field_values_list_bucket(container_requester):
assert status == 410


async def test_time_field_validation(container_requester):
    """schema.Time accepts "H:MM:SS" strings inside the [min, max] window
    and rejects malformed strings, out-of-bounds times, and non-strings
    with a 412."""
    async with container_requester as requester:
        # An in-range value is stored and round-trips through GET unchanged.
        resp, status = await requester(
            "POST",
            "/db/guillotina/",
            data=json.dumps({"@type": "TestSchema", "time_": "9:00:00", "id": "foo_item"}),
        )
        assert status == 201

        resp, status = await requester("GET", "/db/guillotina/foo_item")
        assert status == 200
        assert resp["time_"] == "9:00:00"

        # (payload, expected deserialization error message or None)
        rejected = [
            ("9:70:00", None),                  # minute out of range
            ("12:00:01", "Value is too big"),   # above max=12:00:00
            ("8:59:59", "Value is too small"),  # below min=9:00:00
            (3600, None),                       # not a string at all
        ]
        for payload, message in rejected:
            resp, status = await requester(
                "POST",
                "/db/guillotina/",
                data=json.dumps({"@type": "TestSchema", "time_": payload}),
            )
            assert status == 412
            if message is not None:
                assert resp["deserialization_errors"][0]["message"] == message


async def test_patch_field_validation(container_requester):
async with container_requester as requester:
resp, status = await requester(
Expand Down
7 changes: 7 additions & 0 deletions guillotina/tests/test_serialize.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
from datetime import datetime
from datetime import time
from dateutil.tz import tzutc
from guillotina import fields
from guillotina import schema
Expand Down Expand Up @@ -264,6 +265,12 @@ async def test_deserialize_datetime(dummy_guillotina):
assert converted.minute == now.minute


async def test_deserialize_time(dummy_guillotina):
    """A datetime.time value passes through schema_compatible intact."""
    expected = time(10, 0, 0)
    converted = schema_compatible(expected, ITestSchema["time"])
    assert converted.minute == expected.minute


async def test_check_permission_deserialize_content(dummy_request):
login()
content = create_content()
Expand Down

0 comments on commit 41c45f0

Please sign in to comment.