Mirror of https://github.com/mealie-recipes/mealie.git (synced 2025-03-12 04:35:35 -07:00)

chore(deps): update dependency ruff to ^0.9.0 (#4871)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
Co-authored-by: Michael Genson <71845777+michael-genson@users.noreply.github.com>
Co-authored-by: Kuchenpirat <24235032+Kuchenpirat@users.noreply.github.com>

This commit is contained in:
parent ea0bec2336
commit 2c2de1e95b

Changed paths:
.pre-commit-config.yaml
poetry.lock
pyproject.toml
mealie/db/fixes
mealie/repos
mealie/services
tests/integration_tests/user_household_tests
tests/integration_tests/user_recipe_tests
tests/unit_tests
@@ -12,7 +12,7 @@ repos:
         exclude: ^tests/data/
   - repo: https://github.com/astral-sh/ruff-pre-commit
     # Ruff version.
-    rev: v0.9.1
+    rev: v0.9.0
     hooks:
       - id: ruff
       - id: ruff-format
@@ -26,8 +26,7 @@ def fix_group_with_no_name(session: Session):
         return

     logger.info(
-        f'{len(groups)} {"group" if len(groups) == 1 else "groups"} found with a missing name; '
-        f"applying default name"
+        f"{len(groups)} {'group' if len(groups) == 1 else 'groups'} found with a missing name; applying default name"
     )

     offset = 0
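Context for the f-string hunks in this commit: ruff 0.9 ships an updated formatter style that, among other things, normalizes quotes inside f-strings, so the implicitly concatenated two-line message above can collapse into one double-quoted f-string with single quotes inside the replacement field. A minimal, runnable sketch of the same pluralization pattern (not taken from the Mealie codebase):

# Both spellings produce the same string; the second is the form
# ruff 0.9's formatter prefers.
groups = ["home", "work"]

old_style = f'{len(groups)} {"group" if len(groups) == 1 else "groups"} found with a missing name; ' f"applying default name"
new_style = f"{len(groups)} {'group' if len(groups) == 1 else 'groups'} found with a missing name; applying default name"

assert old_style == new_style
print(new_style)  # -> 2 groups found with a missing name; applying default name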
@@ -51,7 +51,7 @@ def fix_dangling_refs(session: Session):

         if result.rowcount:
             logger.info(
-                f'Reassigned {result.rowcount} {"row" if result.rowcount == 1 else "rows"} '
+                f"Reassigned {result.rowcount} {'row' if result.rowcount == 1 else 'rows'} "
                 f'in "{table_name}" table to default user ({default_user.id})'
             )

@@ -63,7 +63,7 @@ def fix_dangling_refs(session: Session):

         if result.rowcount:
             logger.info(
-                f'Deleted {result.rowcount} {"row" if result.rowcount == 1 else "rows"} '
+                f"Deleted {result.rowcount} {'row' if result.rowcount == 1 else 'rows'} "
                 f'in "{table_name}" table with invalid user ids'
             )

@@ -24,7 +24,7 @@ class RepositoryCookbooks(HouseholdRepositoryGeneric[ReadCookBook, CookBook]):
                 return super().create(data)
             except IntegrityError:
                 self.session.rollback()
-                data.slug = slugify(f"{data.name} ({i+1})")
+                data.slug = slugify(f"{data.name} ({i + 1})")

         raise  # raise the last IntegrityError

@@ -45,7 +45,7 @@ class RepositoryCookbooks(HouseholdRepositoryGeneric[ReadCookBook, CookBook]):
                 return super().update(match_value, data)
             except IntegrityError:
                 self.session.rollback()
-                data.slug = slugify(f"{data.name} ({i+1})")
+                data.slug = slugify(f"{data.name} ({i + 1})")

         raise  # raise the last IntegrityError

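These two hunks only add spacing around the "+" inside the f-string, but the surrounding retry pattern is worth spelling out. A hedged, simplified sketch (not Mealie's exact code) of what the create/update paths do on a slug collision:

from slugify import slugify  # python-slugify, already a Mealie dependency
from sqlalchemy.exc import IntegrityError

def create_with_unique_slug(repo, data, attempts: int = 10):
    """Hypothetical helper mirroring the retry loop in RepositoryCookbooks."""
    last_exc = None
    for i in range(attempts):
        try:
            return repo.create(data)
        except IntegrityError as exc:  # slug already taken
            last_exc = exc
            repo.session.rollback()
            data.slug = slugify(f"{data.name} ({i + 1})")  # e.g. "my-cookbook-2"
    raise last_exc  # give up and surface the last IntegrityError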
@@ -112,7 +112,7 @@ class TandoorMigrator(BaseMigrator):
         recipes_as_dicts: list[dict] = []
         for i, recipe_zip_file in enumerate(source_dir.glob("*.zip")):
             try:
-                recipe_dir = str(source_dir.joinpath(f"recipe_{i+1}"))
+                recipe_dir = str(source_dir.joinpath(f"recipe_{i + 1}"))
                 os.makedirs(recipe_dir)

                 with zipfile.ZipFile(recipe_zip_file) as recipe_zip:
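The Tandoor hunk again only reformats "i+1"; the loop itself unpacks each exported recipe archive into its own numbered scratch directory. A rough, hedged stand-in for that step:

import zipfile
from pathlib import Path

def unpack_recipe_exports(source_dir: Path) -> list[Path]:
    """Hypothetical reduction of the extraction loop in TandoorMigrator."""
    recipe_dirs: list[Path] = []
    for i, recipe_zip_file in enumerate(source_dir.glob("*.zip")):
        recipe_dir = source_dir / f"recipe_{i + 1}"  # recipe_1, recipe_2, ...
        recipe_dir.mkdir(exist_ok=True)
        with zipfile.ZipFile(recipe_zip_file) as recipe_zip:
            recipe_zip.extractall(recipe_dir)
        recipe_dirs.append(recipe_dir)
    return recipe_dirs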
@@ -129,10 +129,7 @@ def clean_image(image: str | list | dict | None = None, default: str = "no image
         case [{"@id": str(_)}, *_]:
             return [x["@id"] for x in image if "@id" in x]
         case _:
-            logger.exception(
-                f"Unexpected type for image: {
-                    type(image)}, {image}"
-            )
+            logger.exception(f"Unexpected type for image: {type(image)}, {image}")
             return [default]


@@ -227,10 +224,7 @@ def clean_instructions(steps_object: list | dict | str, default: list | None = N
                 )
             )
         case _:
-            raise TypeError(
-                f"Unexpected type for instructions: {
-                    type(steps_object)}, {steps_object}"
-            )
+            raise TypeError(f"Unexpected type for instructions: {type(steps_object)}, {steps_object}")


 def _sanitize_instruction_text(line: str | dict) -> str:
@@ -290,10 +284,7 @@ def clean_ingredients(ingredients: list | str | None, default: list | None = Non
         case str(ingredients):
             return [clean_string(ingredient) for ingredient in ingredients.splitlines() if ingredient.strip()]
         case _:
-            raise TypeError(
-                f"Unexpected type for ingredients: {
-                    type(ingredients)}, {ingredients}"
-            )
+            raise TypeError(f"Unexpected type for ingredients: {type(ingredients)}, {ingredients}")


 def clean_int(val: str | int | None, min: int | None = None, max: int | None = None):
@@ -531,10 +522,7 @@ def clean_categories(category: str | list) -> list[str]:
             #
             return [cat["name"] for cat in category if "name" in cat]
         case _:
-            raise TypeError(
-                f"Unexpected type for category: {
-                    type(category)}, {category}"
-            )
+            raise TypeError(f"Unexpected type for category: {type(category)}, {category}")


 def clean_tags(data: str | list[str]) -> list[str]:
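All of the scraper-cleaner hunks above collapse a multi-line logger or TypeError call into a single line. The structural pattern matching around them is the interesting part; a simplified, hedged sketch of how such a cleaner normalizes a schema.org-style image field (the real clean_image handles more shapes than shown here):

import logging

logger = logging.getLogger(__name__)

def clean_image(image: str | list | dict | None = None, default: str = "no image") -> list[str]:
    """Simplified sketch; only the cases visible in the diff are reproduced."""
    match image:
        case str():
            # A single URL string.
            return [image]
        case [{"@id": str(_)}, *_]:
            # A list of JSON-LD objects such as {"@id": "https://..."}.
            return [x["@id"] for x in image if "@id" in x]
        case _:
            logger.exception(f"Unexpected type for image: {type(image)}, {image}")
            return [default]

print(clean_image("https://example.com/cake.jpg"))           # ['https://example.com/cake.jpg']
print(clean_image([{"@id": "https://example.com/a.jpg"}]))   # ['https://example.com/a.jpg']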
poetry.lock (generated file; 40 changed lines)
@@ -2828,29 +2828,29 @@ jupyter = ["ipywidgets (>=7.5.1,<9)"]

 [[package]]
 name = "ruff"
-version = "0.8.6"
+version = "0.9.0"
 description = "An extremely fast Python linter and code formatter, written in Rust."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "ruff-0.8.6-py3-none-linux_armv6l.whl", hash = "sha256:defed167955d42c68b407e8f2e6f56ba52520e790aba4ca707a9c88619e580e3"},
-    {file = "ruff-0.8.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:54799ca3d67ae5e0b7a7ac234baa657a9c1784b48ec954a094da7c206e0365b1"},
-    {file = "ruff-0.8.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:e88b8f6d901477c41559ba540beeb5a671e14cd29ebd5683903572f4b40a9807"},
-    {file = "ruff-0.8.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0509e8da430228236a18a677fcdb0c1f102dd26d5520f71f79b094963322ed25"},
-    {file = "ruff-0.8.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:91a7ddb221779871cf226100e677b5ea38c2d54e9e2c8ed847450ebbdf99b32d"},
-    {file = "ruff-0.8.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:248b1fb3f739d01d528cc50b35ee9c4812aa58cc5935998e776bf8ed5b251e75"},
-    {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bc3c083c50390cf69e7e1b5a5a7303898966be973664ec0c4a4acea82c1d4315"},
-    {file = "ruff-0.8.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52d587092ab8df308635762386f45f4638badb0866355b2b86760f6d3c076188"},
-    {file = "ruff-0.8.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:61323159cf21bc3897674e5adb27cd9e7700bab6b84de40d7be28c3d46dc67cf"},
-    {file = "ruff-0.8.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ae4478b1471fc0c44ed52a6fb787e641a2ac58b1c1f91763bafbc2faddc5117"},
-    {file = "ruff-0.8.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:0c000a471d519b3e6cfc9c6680025d923b4ca140ce3e4612d1a2ef58e11f11fe"},
-    {file = "ruff-0.8.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:9257aa841e9e8d9b727423086f0fa9a86b6b420fbf4bf9e1465d1250ce8e4d8d"},
-    {file = "ruff-0.8.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:45a56f61b24682f6f6709636949ae8cc82ae229d8d773b4c76c09ec83964a95a"},
-    {file = "ruff-0.8.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:496dd38a53aa173481a7d8866bcd6451bd934d06976a2505028a50583e001b76"},
-    {file = "ruff-0.8.6-py3-none-win32.whl", hash = "sha256:e169ea1b9eae61c99b257dc83b9ee6c76f89042752cb2d83486a7d6e48e8f764"},
-    {file = "ruff-0.8.6-py3-none-win_amd64.whl", hash = "sha256:f1d70bef3d16fdc897ee290d7d20da3cbe4e26349f62e8a0274e7a3f4ce7a905"},
-    {file = "ruff-0.8.6-py3-none-win_arm64.whl", hash = "sha256:7d7fc2377a04b6e04ffe588caad613d0c460eb2ecba4c0ccbbfe2bc973cbc162"},
-    {file = "ruff-0.8.6.tar.gz", hash = "sha256:dcad24b81b62650b0eb8814f576fc65cfee8674772a6e24c9b747911801eeaa5"},
+    {file = "ruff-0.9.0-py3-none-linux_armv6l.whl", hash = "sha256:949b3513f931741e006cf267bf89611edff04e1f012013424022add3ce78f319"},
+    {file = "ruff-0.9.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:99fbcb8c7fe94ae1e462ab2a1ef17cb20b25fb6438b9f198b1bcf5207a0a7916"},
+    {file = "ruff-0.9.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:0b022afd8eb0fcfce1e0adec84322abf4d6ce3cd285b3b99c4f17aae7decf749"},
+    {file = "ruff-0.9.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:336567ce92c9ca8ec62780d07b5fa11fbc881dc7bb40958f93a7d621e7ab4589"},
+    {file = "ruff-0.9.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d338336c44bda602dc8e8766836ac0441e5b0dfeac3af1bd311a97ebaf087a75"},
+    {file = "ruff-0.9.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9b3ececf523d733e90b540e7afcc0494189e8999847f8855747acd5a9a8c45f"},
+    {file = "ruff-0.9.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a11c0872a31232e473e2e0e2107f3d294dbadd2f83fb281c3eb1c22a24866924"},
+    {file = "ruff-0.9.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b5fd06220c17a9cc0dc7fc6552f2ac4db74e8e8bff9c401d160ac59d00566f54"},
+    {file = "ruff-0.9.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0457e775c74bf3976243f910805242b7dcd389e1d440deccbd1194ca17a5728c"},
+    {file = "ruff-0.9.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05415599bbcb318f730ea1b46a39e4fbf71f6a63fdbfa1dda92efb55f19d7ecf"},
+    {file = "ruff-0.9.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:fbf9864b009e43cfc1c8bed1a6a4c529156913105780af4141ca4342148517f5"},
+    {file = "ruff-0.9.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:37b3da222b12e2bb2ce628e02586ab4846b1ed7f31f42a5a0683b213453b2d49"},
+    {file = "ruff-0.9.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:733c0fcf2eb0c90055100b4ed1af9c9d87305b901a8feb6a0451fa53ed88199d"},
+    {file = "ruff-0.9.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:8221a454bfe5ccdf8017512fd6bb60e6ec30f9ea252b8a80e5b73619f6c3cefd"},
+    {file = "ruff-0.9.0-py3-none-win32.whl", hash = "sha256:d345f2178afd192c7991ddee59155c58145e12ad81310b509bd2e25c5b0247b3"},
+    {file = "ruff-0.9.0-py3-none-win_amd64.whl", hash = "sha256:0cbc0905d94d21305872f7f8224e30f4bbcd532bc21b2225b2446d8fc7220d19"},
+    {file = "ruff-0.9.0-py3-none-win_arm64.whl", hash = "sha256:7b1148771c6ca88f820d761350a053a5794bc58e0867739ea93eb5e41ad978cd"},
+    {file = "ruff-0.9.0.tar.gz", hash = "sha256:143f68fa5560ecf10fc49878b73cee3eab98b777fcf43b0e62d43d42f5ef9d8b"},
 ]

 [[package]]
@@ -3421,4 +3421,4 @@ pgsql = ["psycopg2-binary"]
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.12"
-content-hash = "ee922a45c721fc906d1b41e9b693211c57795900e05f59fdf802e1c2d7481bef"
+content-hash = "1561f8552de2253187d19299bb098f0ab53b1def90a0fa121a3cba379d5b0da8"
@@ -64,7 +64,7 @@ pylint = "^3.0.0"
 pytest = "^8.0.0"
 pytest-asyncio = "^0.25.0"
 rich = "^13.5.2"
-ruff = "^0.8.0"
+ruff = "^0.9.0"
 types-PyYAML = "^6.0.4"
 types-python-dateutil = "^2.8.18"
 types-python-slugify = "^6.0.0"
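After this constraint bump, poetry re-resolves ruff within ^0.9.0, which is what produced the lock-file changes above. A small, hedged way to confirm which ruff version the active environment actually resolved, using only the standard library:

import importlib.metadata

# Prints the installed ruff version, e.g. "0.9.0"; raises PackageNotFoundError
# if ruff is not installed in the current environment.
print(importlib.metadata.version("ruff"))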
@@ -1,5 +1,5 @@
 import random
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from uuid import UUID

 from fastapi.testclient import TestClient
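Most of the remaining hunks in this commit are this same import swap repeated across the test suite: datetime.UTC, added in Python 3.11, is an alias for datetime.timezone.utc, so behaviour is unchanged. A small demonstration:

from datetime import UTC, datetime, timezone

assert UTC is timezone.utc              # same singleton, shorter spelling
now = datetime.now(UTC)
assert now.tzinfo is timezone.utc
print(now.date().strftime("%Y-%m-%d"))  # the formatting the tests rely on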
@@ -42,11 +42,11 @@ def create_rule(
 ):
     qf_parts: list[str] = []
     if tags:
-        qf_parts.append(f'tags.id CONTAINS ALL [{",".join([str(tag.id) for tag in tags])}]')
+        qf_parts.append(f"tags.id CONTAINS ALL [{','.join([str(tag.id) for tag in tags])}]")
     if categories:
-        qf_parts.append(f'recipe_category.id CONTAINS ALL [{",".join([str(cat.id) for cat in categories])}]')
+        qf_parts.append(f"recipe_category.id CONTAINS ALL [{','.join([str(cat.id) for cat in categories])}]")
     if households:
-        qf_parts.append(f'household_id IN [{",".join([str(household.id) for household in households])}]')
+        qf_parts.append(f"household_id IN [{','.join([str(household.id) for household in households])}]")

     query_filter_string = " AND ".join(qf_parts)
     return unique_user.repos.group_meal_plan_rules.create(
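For context, the helper above assembles Mealie's query-filter string from optional id lists. A hedged, stand-alone reduction of the string-building part (plain string ids here instead of model objects):

def build_query_filter(tags=None, categories=None, households=None) -> str:
    """Hypothetical reduction of create_rule's filter assembly."""
    qf_parts: list[str] = []
    if tags:
        qf_parts.append(f"tags.id CONTAINS ALL [{','.join(str(t) for t in tags)}]")
    if categories:
        qf_parts.append(f"recipe_category.id CONTAINS ALL [{','.join(str(c) for c in categories)}]")
    if households:
        qf_parts.append(f"household_id IN [{','.join(str(h) for h in households)}]")
    return " AND ".join(qf_parts)

print(build_query_filter(tags=["t1", "t2"], households=["h1"]))
# -> tags.id CONTAINS ALL [t1,t2] AND household_id IN [h1]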
@@ -64,9 +64,9 @@ def test_create_mealplan_no_recipe(api_client: TestClient, unique_user: TestUser
     title = random_string(length=25)
     text = random_string(length=25)
     new_plan = CreatePlanEntry(
-        date=datetime.now(timezone.utc).date(), entry_type="breakfast", title=title, text=text
+        date=datetime.now(UTC).date(), entry_type="breakfast", title=title, text=text
     ).model_dump()
-    new_plan["date"] = datetime.now(timezone.utc).date().strftime("%Y-%m-%d")
+    new_plan["date"] = datetime.now(UTC).date().strftime("%Y-%m-%d")

     response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)

@@ -86,10 +86,10 @@ def test_create_mealplan_with_recipe(api_client: TestClient, unique_user: TestUs
     recipe = response.json()
     recipe_id = recipe["id"]

-    new_plan = CreatePlanEntry(
-        date=datetime.now(timezone.utc).date(), entry_type="dinner", recipe_id=recipe_id
-    ).model_dump(by_alias=True)
-    new_plan["date"] = datetime.now(timezone.utc).date().strftime("%Y-%m-%d")
+    new_plan = CreatePlanEntry(date=datetime.now(UTC).date(), entry_type="dinner", recipe_id=recipe_id).model_dump(
+        by_alias=True
+    )
+    new_plan["date"] = datetime.now(UTC).date().strftime("%Y-%m-%d")
     new_plan["recipeId"] = str(recipe_id)

     response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
@@ -101,14 +101,14 @@ def test_create_mealplan_with_recipe(api_client: TestClient, unique_user: TestUs

 def test_crud_mealplan(api_client: TestClient, unique_user: TestUser):
     new_plan = CreatePlanEntry(
-        date=datetime.now(timezone.utc).date(),
+        date=datetime.now(UTC).date(),
         entry_type="breakfast",
         title=random_string(),
         text=random_string(),
     ).model_dump()

     # Create
-    new_plan["date"] = datetime.now(timezone.utc).date().strftime("%Y-%m-%d")
+    new_plan["date"] = datetime.now(UTC).date().strftime("%Y-%m-%d")
     response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
     response_json = response.json()
     assert response.status_code == 201
@@ -139,13 +139,13 @@ def test_crud_mealplan(api_client: TestClient, unique_user: TestUser):
 def test_get_all_mealplans(api_client: TestClient, unique_user: TestUser):
     for _ in range(3):
         new_plan = CreatePlanEntry(
-            date=datetime.now(timezone.utc).date(),
+            date=datetime.now(UTC).date(),
             entry_type="breakfast",
             title=random_string(),
             text=random_string(),
         ).model_dump()

-        new_plan["date"] = datetime.now(timezone.utc).date().strftime("%Y-%m-%d")
+        new_plan["date"] = datetime.now(UTC).date().strftime("%Y-%m-%d")
         response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
         assert response.status_code == 201

@@ -159,7 +159,7 @@ def test_get_all_mealplans(api_client: TestClient, unique_user: TestUser):

 def test_get_slice_mealplans(api_client: TestClient, unique_user: TestUser):
     # Make List of 10 dates from now to +10 days
-    dates = [datetime.now(timezone.utc).date() + timedelta(days=x) for x in range(10)]
+    dates = [datetime.now(UTC).date() + timedelta(days=x) for x in range(10)]

     # Make a list of 10 meal plans
     meal_plans = [
@@ -193,7 +193,7 @@ def test_get_mealplan_today(api_client: TestClient, unique_user: TestUser):
     # Create Meal Plans for today
     test_meal_plans = [
         CreatePlanEntry(
-            date=datetime.now(timezone.utc).date(), entry_type="breakfast", title=random_string(), text=random_string()
+            date=datetime.now(UTC).date(), entry_type="breakfast", title=random_string(), text=random_string()
         ).model_dump()
         for _ in range(3)
     ]
@@ -212,7 +212,7 @@ def test_get_mealplan_today(api_client: TestClient, unique_user: TestUser):
     response_json = response.json()

     for meal_plan in response_json:
-        assert meal_plan["date"] == datetime.now(timezone.utc).date().strftime("%Y-%m-%d")
+        assert meal_plan["date"] == datetime.now(UTC).date().strftime("%Y-%m-%d")


 def test_get_mealplan_with_rules_categories_and_tags_filter(api_client: TestClient, unique_user: TestUser):
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 import pytest
 from fastapi.testclient import TestClient
@@ -14,7 +14,7 @@ def webhook_data():
         "name": "Test-Name",
         "url": "https://my-fake-url.com",
         "time": "00:00",
-        "scheduledTime": datetime.now(timezone.utc),
+        "scheduledTime": datetime.now(UTC),
     }


@@ -41,7 +41,7 @@ def test_read_webhook(api_client: TestClient, unique_user: TestUser, webhook_dat
     assert webhook["id"] == item_id
     assert webhook["name"] == webhook_data["name"]
     assert webhook["url"] == webhook_data["url"]
-    assert webhook["scheduledTime"] == str(webhook_data["scheduledTime"].astimezone(timezone.utc).time())
+    assert webhook["scheduledTime"] == str(webhook_data["scheduledTime"].astimezone(UTC).time())
     assert webhook["enabled"] == webhook_data["enabled"]

@@ -6,7 +6,6 @@ import sqlalchemy
from fastapi.testclient import TestClient

from mealie.core.dependencies.dependencies import validate_file_token
from mealie.repos.repository_factory import AllRepositories
from mealie.schema.recipe.recipe_bulk_actions import ExportTypes
from mealie.schema.recipe.recipe_category import CategorySave, TagSave
from tests import utils
@@ -137,7 +136,7 @@ def test_bulk_export_recipes(api_client: TestClient, unique_user: TestUser, ten_
     assert validate_file_token(response_data["fileToken"]) == Path(export_path)

     # Use Export Token to download export
-    response = api_client.get(f'/api/utils/download?token={response_data["fileToken"]}')
+    response = api_client.get(f"/api/utils/download?token={response_data['fileToken']}")

     assert response.status_code == 200

@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 import pytest
 from fastapi.testclient import TestClient
@@ -242,7 +242,7 @@ def test_user_can_update_last_made_on_other_household(
     assert recipe["id"] == str(h2_recipe_id)
     old_last_made = recipe["lastMade"]

-    now = datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")
+    now = datetime.now(UTC).isoformat().replace("+00:00", "Z")
     response = api_client.patch(
         api_routes.recipes_slug_last_made(h2_recipe_slug), json={"timestamp": now}, headers=unique_user.token
     )
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from fastapi.testclient import TestClient

@@ -106,7 +106,7 @@ def test_user_update_last_made(api_client: TestClient, user_tuple: list[TestUser
     response = api_client.put(api_routes.recipes + f"/{recipe_name}", json=recipe, headers=usr_1.token)

     # User 2 should be able to update the last made timestamp
-    last_made_json = {"timestamp": datetime.now(timezone.utc).isoformat().replace("+00:00", "Z")}
+    last_made_json = {"timestamp": datetime.now(UTC).isoformat().replace("+00:00", "Z")}
     response = api_client.patch(
         api_routes.recipes_slug_last_made(recipe_name), json=last_made_json, headers=usr_2.token
     )
@@ -1,7 +1,7 @@
 import random
 import time
 from collections import defaultdict
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from random import randint
 from urllib.parse import parse_qsl, urlsplit

@@ -238,7 +238,7 @@ def test_pagination_filter_null(unique_user: TestUser):
                 user_id=unique_user.user_id,
                 group_id=unique_user.group_id,
                 name=random_string(),
-                last_made=datetime.now(timezone.utc),
+                last_made=datetime.now(UTC),
             )
         )

@@ -626,7 +626,7 @@ def test_pagination_filter_datetimes(
 )
 def test_pagination_order_by_multiple(unique_user: TestUser, order_direction: OrderDirection):
     database = unique_user.repos
-    current_time = datetime.now(timezone.utc)
+    current_time = datetime.now(UTC)

     alphabet = ["a", "b", "c", "d", "e"]
     abbreviations = alphabet.copy()
@@ -687,7 +687,7 @@ def test_pagination_order_by_multiple_directions(
     unique_user: TestUser, order_by_str: str, order_direction: OrderDirection
 ):
     database = unique_user.repos
-    current_time = datetime.now(timezone.utc)
+    current_time = datetime.now(UTC)

     alphabet = ["a", "b", "c", "d", "e"]
     abbreviations = alphabet.copy()
@@ -735,7 +735,7 @@ def test_pagination_order_by_multiple_directions(
 )
 def test_pagination_order_by_nested_model(unique_user: TestUser, order_direction: OrderDirection):
     database = unique_user.repos
-    current_time = datetime.now(timezone.utc)
+    current_time = datetime.now(UTC)

     alphabet = ["a", "b", "c", "d", "e"]
     labels = database.group_multi_purpose_labels.create_many(
@@ -766,7 +766,7 @@ def test_pagination_order_by_nested_model(unique_user: TestUser, order_direction

 def test_pagination_order_by_doesnt_filter(unique_user: TestUser):
     database = unique_user.repos
-    current_time = datetime.now(timezone.utc)
+    current_time = datetime.now(UTC)

     label = database.group_multi_purpose_labels.create(
         MultiPurposeLabelSave(name=random_string(), group_id=unique_user.group_id)
@@ -810,7 +810,7 @@ def test_pagination_order_by_nulls(
     unique_user: TestUser, null_position: OrderByNullPosition, order_direction: OrderDirection
 ):
     database = unique_user.repos
-    current_time = datetime.now(timezone.utc)
+    current_time = datetime.now(UTC)

     label = database.group_multi_purpose_labels.create(
         MultiPurposeLabelSave(name=random_string(), group_id=unique_user.group_id)
@@ -916,7 +916,7 @@ def test_pagination_shopping_list_items_with_labels(unique_user: TestUser):


 def test_pagination_filter_dates(api_client: TestClient, unique_user: TestUser):
-    today = datetime.now(timezone.utc).date()
+    today = datetime.now(UTC).date()

     yesterday = today - timedelta(days=1)
     tomorrow = today + timedelta(days=1)
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import cast
 from uuid import UUID

@@ -340,12 +340,12 @@ def test_recipe_repo_pagination_by_categories(unique_user: TestUser):
         page=1,
         per_page=-1,
         order_by="random",
-        pagination_seed=str(datetime.now(timezone.utc)),
+        pagination_seed=str(datetime.now(UTC)),
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
     for _ in range(5):
-        pagination_query.pagination_seed = str(datetime.now(timezone.utc))
+        pagination_query.pagination_seed = str(datetime.now(UTC))
         random_ordered.append(database.recipes.page_all(pagination_query, categories=[category_slug]).items)
     assert not all(i == random_ordered[0] for i in random_ordered)

@@ -437,12 +437,12 @@ def test_recipe_repo_pagination_by_tags(unique_user: TestUser):
         page=1,
         per_page=-1,
         order_by="random",
-        pagination_seed=str(datetime.now(timezone.utc)),
+        pagination_seed=str(datetime.now(UTC)),
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
     for _ in range(5):
-        pagination_query.pagination_seed = str(datetime.now(timezone.utc))
+        pagination_query.pagination_seed = str(datetime.now(UTC))
         random_ordered.append(database.recipes.page_all(pagination_query, tags=[tag_slug]).items)
     assert len(random_ordered[0]) == 15
     assert not all(i == random_ordered[0] for i in random_ordered)
@@ -534,12 +534,12 @@ def test_recipe_repo_pagination_by_tools(unique_user: TestUser):
         page=1,
         per_page=-1,
         order_by="random",
-        pagination_seed=str(datetime.now(timezone.utc)),
+        pagination_seed=str(datetime.now(UTC)),
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
     for _ in range(5):
-        pagination_query.pagination_seed = str(datetime.now(timezone.utc))
+        pagination_query.pagination_seed = str(datetime.now(UTC))
         random_ordered.append(database.recipes.page_all(pagination_query, tools=[tool_id]).items)
     assert len(random_ordered[0]) == 15
     assert not all(i == random_ordered[0] for i in random_ordered)
@@ -619,12 +619,12 @@ def test_recipe_repo_pagination_by_foods(unique_user: TestUser):
         page=1,
         per_page=-1,
         order_by="random",
-        pagination_seed=str(datetime.now(timezone.utc)),
+        pagination_seed=str(datetime.now(UTC)),
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
     for _ in range(5):
-        pagination_query.pagination_seed = str(datetime.now(timezone.utc))
+        pagination_query.pagination_seed = str(datetime.now(UTC))
         random_ordered.append(database.recipes.page_all(pagination_query, foods=[food_id]).items)
     assert len(random_ordered[0]) == 15
     assert not all(i == random_ordered[0] for i in random_ordered)
@@ -696,12 +696,12 @@ def test_random_order_recipe_search(
         page=1,
         per_page=-1,
         order_by="random",
-        pagination_seed=str(datetime.now(timezone.utc)),
+        pagination_seed=str(datetime.now(UTC)),
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
     for _ in range(5):
-        pagination.pagination_seed = str(datetime.now(timezone.utc))
+        pagination.pagination_seed = str(datetime.now(UTC))
         random_ordered.append(repo.page_all(pagination, search="soup").items)
     assert not all(i == random_ordered[0] for i in random_ordered)

@@ -713,7 +713,7 @@ def test_order_by_rating(user_tuple: tuple[TestUser, TestUser]):

     recipes: list[Recipe] = []
     for i in range(3):
-        slug = f"recipe-{i+1}-{random_string(5)}"
+        slug = f"recipe-{i + 1}-{random_string(5)}"
         recipes.append(
             repo.create(
                 Recipe(
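A generic, hedged illustration of what these random-order tests assert: re-seeding a random ordering with a fresh timestamp should almost always change the order. This uses plain random.Random for the shuffle; Mealie's repository layer applies the seed on the database side, which is not shown here.

import random
from datetime import UTC, datetime

items = list(range(15))
orders = []
for _ in range(5):
    seed = str(datetime.now(UTC))   # same kind of seed the tests use
    shuffled = items.copy()
    random.Random(seed).shuffle(shuffled)
    orders.append(shuffled)

# With distinct timestamp seeds, the five orderings are very unlikely to match.
assert not all(order == orders[0] for order in orders)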
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 import pytest
 from sqlalchemy.orm import Session
@@ -129,11 +129,11 @@ def test_random_order_search(
         page=1,
         per_page=-1,
         order_by="random",
-        pagination_seed=str(datetime.now(timezone.utc)),
+        pagination_seed=str(datetime.now(UTC)),
         order_direction=OrderDirection.asc,
     )
     random_ordered = []
     for _ in range(5):
-        pagination.pagination_seed = str(datetime.now(timezone.utc))
+        pagination.pagination_seed = str(datetime.now(UTC))
         random_ordered.append(repo.page_all(pagination, search="unit").items)
     assert not all(i == random_ordered[0] for i in random_ordered)
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta

 from fastapi.testclient import TestClient
 from pydantic import UUID4
@@ -34,10 +34,10 @@ def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
     response_json = response.json()
     initial_event_count = len(response_json["items"])

-    new_plan = CreatePlanEntry(
-        date=datetime.now(timezone.utc).date(), entry_type="dinner", recipe_id=recipe_id
-    ).model_dump(by_alias=True)
-    new_plan["date"] = datetime.now(timezone.utc).date().isoformat()
+    new_plan = CreatePlanEntry(date=datetime.now(UTC).date(), entry_type="dinner", recipe_id=recipe_id).model_dump(
+        by_alias=True
+    )
+    new_plan["date"] = datetime.now(UTC).date().isoformat()
     new_plan["recipeId"] = str(recipe_id)

     response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
@@ -65,7 +65,7 @@ def test_new_mealplan_event(api_client: TestClient, unique_user: TestUser):
     response = api_client.get(api_routes.recipes_slug(recipe_name), headers=unique_user.token)
     new_recipe_data: dict = response.json()
     recipe = RecipeSummary.model_validate(new_recipe_data)
-    assert recipe.last_made.date() == datetime.now(timezone.utc).date()  # type: ignore
+    assert recipe.last_made.date() == datetime.now(UTC).date()  # type: ignore

     # make sure nothing else was updated
     for data in [original_recipe_data, new_recipe_data]:
@@ -101,10 +101,10 @@ def test_new_mealplan_event_duplicates(api_client: TestClient, unique_user: Test
     response_json = response.json()
     initial_event_count = len(response_json["items"])

-    new_plan = CreatePlanEntry(
-        date=datetime.now(timezone.utc).date(), entry_type="dinner", recipe_id=recipe_id
-    ).model_dump(by_alias=True)
-    new_plan["date"] = datetime.now(timezone.utc).date().isoformat()
+    new_plan = CreatePlanEntry(date=datetime.now(UTC).date(), entry_type="dinner", recipe_id=recipe_id).model_dump(
+        by_alias=True
+    )
+    new_plan["date"] = datetime.now(UTC).date().isoformat()
     new_plan["recipeId"] = str(recipe_id)

     response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
@@ -148,9 +148,9 @@ def test_new_mealplan_events_with_multiple_recipes(api_client: TestClient, uniqu
         mealplan_count_by_recipe_id[recipe.id] = 0  # type: ignore
         for _ in range(random_int(1, 5)):
             new_plan = CreatePlanEntry(
-                date=datetime.now(timezone.utc).date(), entry_type="dinner", recipe_id=str(recipe.id)
+                date=datetime.now(UTC).date(), entry_type="dinner", recipe_id=str(recipe.id)
             ).model_dump(by_alias=True)
-            new_plan["date"] = datetime.now(timezone.utc).date().isoformat()
+            new_plan["date"] = datetime.now(UTC).date().isoformat()
             new_plan["recipeId"] = str(recipe.id)

             response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
@@ -200,17 +200,17 @@ def test_preserve_future_made_date(api_client: TestClient, unique_user: TestUser
     recipe = RecipeSummary.model_validate(response.json())
     recipe_id = str(recipe.id)

-    future_dt = datetime.now(timezone.utc) + timedelta(days=random_int(1, 10))
+    future_dt = datetime.now(UTC) + timedelta(days=random_int(1, 10))
     recipe.last_made = future_dt
     response = api_client.put(
         api_routes.recipes_slug(recipe.slug), json=utils.jsonify(recipe), headers=unique_user.token
     )
     assert response.status_code == 200

-    new_plan = CreatePlanEntry(
-        date=datetime.now(timezone.utc).date(), entry_type="dinner", recipe_id=recipe_id
-    ).model_dump(by_alias=True)
-    new_plan["date"] = datetime.now(timezone.utc).date().isoformat()
+    new_plan = CreatePlanEntry(date=datetime.now(UTC).date(), entry_type="dinner", recipe_id=recipe_id).model_dump(
+        by_alias=True
+    )
+    new_plan["date"] = datetime.now(UTC).date().isoformat()
     new_plan["recipeId"] = str(recipe_id)

     response = api_client.post(api_routes.households_mealplans, json=new_plan, headers=unique_user.token)
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime

 from mealie.schema.household.group_shopping_list import ShoppingListItemCreate, ShoppingListItemOut, ShoppingListSave
 from mealie.services.scheduler.tasks.delete_old_checked_shopping_list_items import (
@@ -44,7 +44,7 @@ def test_cleanup(unique_user: TestUser):
     for item in unchecked_items + checked_items:
         assert item in shopping_list.list_items

-    checked_items.sort(key=lambda x: x.updated_at or datetime.now(timezone.utc), reverse=True)
+    checked_items.sort(key=lambda x: x.updated_at or datetime.now(UTC), reverse=True)
     expected_kept_items = unchecked_items + checked_items[:MAX_CHECKED_ITEMS]
     expected_deleted_items = checked_items[MAX_CHECKED_ITEMS:]

@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from uuid import UUID

 from pydantic import UUID4
@@ -31,7 +31,7 @@ def webhook_factory(
         name=name or random_string(),
         url=url or random_string(),
         webhook_type=webhook_type,
-        scheduled_time=scheduled_time.time() if scheduled_time else datetime.now(timezone.utc).time(),
+        scheduled_time=scheduled_time.time() if scheduled_time else datetime.now(UTC).time(),
         group_id=group_id,
         household_id=household_id,
     )
@@ -45,7 +45,7 @@ def test_get_scheduled_webhooks_filter_query(unique_user: TestUser):
     database = unique_user.repos
     expected: list[SaveWebhook] = []

-    start = datetime.now(timezone.utc)
+    start = datetime.now(UTC)

     for _ in range(5):
         new_item = webhook_factory(
@@ -65,7 +65,7 @@ def test_get_scheduled_webhooks_filter_query(unique_user: TestUser):
         expected.append(new_item)

     event_bus_listener = WebhookEventListener(UUID(unique_user.group_id), UUID(unique_user.household_id))
-    results = event_bus_listener.get_scheduled_webhooks(start, datetime.now(timezone.utc) + timedelta(minutes=5))
+    results = event_bus_listener.get_scheduled_webhooks(start, datetime.now(UTC) + timedelta(minutes=5))

     assert len(results) == len(expected)

@@ -85,8 +85,8 @@ def test_event_listener_get_meals_by_date_range(unique_user: TestUser):
     """
     meal_repo = unique_user.repos.meals

-    start_date = datetime.now(timezone.utc) - timedelta(days=7)
-    end_date = datetime.now(timezone.utc)
+    start_date = datetime.now(UTC) - timedelta(days=7)
+    end_date = datetime.now(UTC)

     meal_1 = meal_repo.create(
         {
@@ -152,8 +152,8 @@ def test_event_listener_get_meals_by_date_range(unique_user: TestUser):
 def test_get_meals_by_date_range(unique_user: TestUser):
     meal_repo = unique_user.repos.meals

-    start_date = datetime.now(timezone.utc) - timedelta(days=7)
-    end_date = datetime.now(timezone.utc)
+    start_date = datetime.now(UTC) - timedelta(days=7)
+    end_date = datetime.now(UTC)

     meal_1 = meal_repo.create(
         {
@@ -210,8 +210,8 @@ def test_get_meals_by_date_range_no_meals(unique_user: TestUser):
     """
     meal_repo = unique_user.repos.meals

-    start_date = datetime.now(timezone.utc) - timedelta(days=7)
-    end_date = datetime.now(timezone.utc)
+    start_date = datetime.now(UTC) - timedelta(days=7)
+    end_date = datetime.now(UTC)

     meals_in_range = meal_repo.get_meals_by_date_range(start_date, end_date)

@@ -224,7 +224,7 @@ def test_get_meals_by_date_range_single_day(unique_user: TestUser):
     """
     meal_repo = unique_user.repos.meals

-    single_day = datetime.now(timezone.utc)
+    single_day = datetime.now(UTC)

     meal_1 = meal_repo.create(
         {
@@ -255,12 +255,12 @@ def test_get_meals_by_date_range_no_overlap(unique_user: TestUser):
     """
     meal_repo = unique_user.repos.meals

-    start_date = datetime.now(timezone.utc) + timedelta(days=1)
-    end_date = datetime.now(timezone.utc) + timedelta(days=10)
+    start_date = datetime.now(UTC) + timedelta(days=1)
+    end_date = datetime.now(UTC) + timedelta(days=10)

     meal_1 = meal_repo.create(
         {
-            "date": datetime.now(timezone.utc) - timedelta(days=5),
+            "date": datetime.now(UTC) - timedelta(days=5),
             "entry_type": "dinner",
             "title": "Meal Outside Range",
             "text": "This meal is outside the tested date range",
@@ -289,7 +289,7 @@ def test_get_meals_by_date_range_invalid_date_range(unique_user: TestUser):
     """
     meal_repo = unique_user.repos.meals

-    start_date = datetime.now(timezone.utc)
+    start_date = datetime.now(UTC)
     end_date = start_date - timedelta(days=1)

     meals_in_range = meal_repo.get_meals_by_date_range(start_date, end_date)

@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta

 import pytest

@@ -65,7 +65,7 @@ def test_lock_unlocker_user(unique_user: TestUser) -> None:
     assert not unlocked_user.is_locked

     # Sanity check that the is_locked property is working
-    user.locked_at = datetime.now(timezone.utc) - timedelta(days=2)
+    user.locked_at = datetime.now(UTC) - timedelta(days=2)
     assert not user.is_locked


@@ -98,7 +98,7 @@ def test_reset_locked_users(unique_user: TestUser, use_task: bool) -> None:
     assert user.login_attemps == 5

     # Test that the locked user is unlocked by reset
-    user.locked_at = datetime.now(timezone.utc) - timedelta(days=2)
+    user.locked_at = datetime.now(UTC) - timedelta(days=2)
     database.users.update(user.id, user)
     if use_task:
         unlocked = locked_user_reset()
@@ -39,9 +39,9 @@ def test_alembic_revisions_are_in_order() -> None:
     last = None
     for migration in migrations:
         if last is not None:
-            assert (
-                last.revision == migration.down_revision
-            ), f"{last.revision} != {migration.down_revision} for {migration.path}"
+            assert last.revision == migration.down_revision, (
+                f"{last.revision} != {migration.down_revision} for {migration.path}"
+            )

         last = migration
@@ -1,4 +1,4 @@
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from uuid import uuid4

 import pytest
@@ -7,7 +7,7 @@ from mealie.schema.meal_plan.new_meal import CreatePlanEntry


 def test_create_plan_with_title():
-    entry = CreatePlanEntry(date=datetime.now(timezone.utc).date(), title="Test Title")
+    entry = CreatePlanEntry(date=datetime.now(UTC).date(), title="Test Title")

     assert entry.title == "Test Title"
     assert entry.recipe_id is None
@@ -15,7 +15,7 @@ def test_create_plan_with_title():

 def test_create_plan_with_slug():
     uuid = uuid4()
-    entry = CreatePlanEntry(date=datetime.now(timezone.utc).date(), recipe_id=uuid)
+    entry = CreatePlanEntry(date=datetime.now(UTC).date(), recipe_id=uuid)

     assert entry.recipe_id == uuid
     assert entry.title == ""
@@ -23,4 +23,4 @@ def test_create_plan_with_slug():

 def test_slug_or_title_validation():
     with pytest.raises(ValueError):
-        CreatePlanEntry(date=datetime.now(timezone.utc).date(), slug="", title="")
+        CreatePlanEntry(date=datetime.now(UTC).date(), slug="", title="")