Coverage for opt/mealie/lib/python3.12/site-packages/mealie/routes/recipe/recipe_crud_routes.py: 59%
265 statements
coverage.py v7.10.6, created at 2025-11-25 15:32 +0000

from collections import defaultdict
from shutil import copyfileobj
from uuid import UUID

import orjson
import sqlalchemy
import sqlalchemy.exc
from fastapi import (
    BackgroundTasks,
    Depends,
    File,
    Form,
    HTTPException,
    Path,
    Query,
    Request,
    status,
)
from fastapi.datastructures import UploadFile
from pydantic import UUID4
from slugify import slugify

from mealie.core import exceptions
from mealie.core.dependencies import (
    get_temporary_zip_path,
)
from mealie.pkgs import cache
from mealie.repos.all_repositories import get_repositories
from mealie.routes._base import controller
from mealie.routes._base.routers import MealieCrudRoute, UserAPIRouter
from mealie.schema.cookbook.cookbook import ReadCookBook
from mealie.schema.make_dependable import make_dependable
from mealie.schema.recipe import Recipe, ScrapeRecipe, ScrapeRecipeData
from mealie.schema.recipe.recipe import (
    CreateRecipe,
    CreateRecipeByUrlBulk,
    RecipeLastMade,
    RecipeSummary,
)
from mealie.schema.recipe.recipe_asset import RecipeAsset
from mealie.schema.recipe.recipe_scraper import ScrapeRecipeTest
from mealie.schema.recipe.recipe_suggestion import RecipeSuggestionQuery, RecipeSuggestionResponse
from mealie.schema.recipe.request_helpers import (
    RecipeDuplicate,
    UpdateImageResponse,
)
from mealie.schema.response import PaginationBase, PaginationQuery
from mealie.schema.response.pagination import RecipeSearchQuery
from mealie.schema.response.responses import ErrorResponse
from mealie.services import urls
from mealie.services.event_bus_service.event_types import (
    EventOperation,
    EventRecipeBulkData,
    EventRecipeBulkReportData,
    EventRecipeData,
    EventTypes,
)
from mealie.services.recipe.recipe_data_service import (
    InvalidDomainError,
    NotAnImageError,
    RecipeDataService,
)
from mealie.services.scraper.recipe_bulk_scraper import RecipeBulkScraperService
from mealie.services.scraper.scraped_extras import ScraperContext
from mealie.services.scraper.scraper import create_from_html
from mealie.services.scraper.scraper_strategies import (
    ForceTimeoutException,
    RecipeScraperOpenAI,
    RecipeScraperPackage,
)

from ._base import BaseRecipeController, JSONBytes

router = UserAPIRouter(prefix="/recipes", route_class=MealieCrudRoute)


@controller(router)
class RecipeController(BaseRecipeController):
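    # Maps exceptions raised by the recipe service layer onto HTTP errors.
    # Every branch raises an HTTPException, so callers can simply return after calling it.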
    def handle_exceptions(self, ex: Exception) -> None:
        thrownType = type(ex)

        if thrownType == exceptions.PermissionDenied:  # coverage: condition never true in tests
            self.logger.error("Permission Denied on recipe controller action")
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN, detail=ErrorResponse.respond(message="Permission Denied")
            )
        elif thrownType == exceptions.NoEntryFound:
            self.logger.error("No Entry Found on recipe controller action")
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND, detail=ErrorResponse.respond(message="No Entry Found")
            )
        elif thrownType == sqlalchemy.exc.IntegrityError:  # coverage: condition never true in tests
            self.logger.error("SQL Integrity Error on recipe controller action")
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST, detail=ErrorResponse.respond(message="Recipe already exists")
            )
        elif thrownType == exceptions.SlugError:  # coverage: condition never true in tests
            self.logger.error("Failed to generate a valid slug from recipe name")
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=ErrorResponse.respond(message="Unable to generate recipe slug"),
            )
        else:
            self.logger.error("Unknown Error on recipe controller action")
            self.logger.exception(ex)
            raise HTTPException(
                status_code=500, detail=ErrorResponse.respond(message="Unknown Error", exception=ex.__class__.__name__)
            )

    # =======================================================================
    # URL Scraping Operations
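
    # These endpoints share _create_recipe_from_web below, which accepts either a URL or raw HTML / JSON-LD.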
    @router.post("/test-scrape-url")
    async def test_parse_recipe_url(self, data: ScrapeRecipeTest):
        # Debugger should produce the same result as the scraper sees before cleaning
        ScraperClass = RecipeScraperOpenAI if data.use_openai else RecipeScraperPackage
        try:
            # coverage: this call always raised an exception in tests
            if scraped_data := await ScraperClass(data.url, self.translator).scrape_url():
                return scraped_data.schema.data
        except ForceTimeoutException as e:
            raise HTTPException(
                status_code=408, detail=ErrorResponse.respond(message="Recipe Scraping Timed Out")
            ) from e

        return "recipe_scrapers was unable to scrape this URL"
    @router.post("/create/html-or-json", status_code=201)
    async def create_recipe_from_html_or_json(self, req: ScrapeRecipeData):
        """Takes in raw HTML or a https://schema.org/Recipe object as a JSON string and parses it like a URL"""

        if req.data.startswith("{"):  # coverage: condition never true in tests
            req.data = RecipeScraperPackage.ld_json_to_html(req.data)

        return await self._create_recipe_from_web(req)

    @router.post("/create/url", status_code=201, response_model=str)
    async def parse_recipe_url(self, req: ScrapeRecipe):
        """Takes in a URL and attempts to scrape data and load it into the database"""

        return await self._create_recipe_from_web(req)
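
    # Shared helper for the create-from-URL and create-from-HTML/JSON endpoints above.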
    async def _create_recipe_from_web(self, req: ScrapeRecipe | ScrapeRecipeData):
        if isinstance(req, ScrapeRecipeData):
            html = req.data
            url = ""
        else:
            html = None
            url = req.url

        try:
            recipe, extras = await create_from_html(url, self.translator, html)
        except ForceTimeoutException as e:
            raise HTTPException(
                status_code=408, detail=ErrorResponse.respond(message="Recipe Scraping Timed Out")
            ) from e

        if req.include_tags:
            ctx = ScraperContext(self.repos)

            recipe.tags = extras.use_tags(ctx)  # type: ignore

        new_recipe = self.service.create_one(recipe)

        if new_recipe:
            self.publish_event(
                event_type=EventTypes.recipe_created,
                document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=new_recipe.slug),
                group_id=new_recipe.group_id,
                household_id=new_recipe.household_id,
                message=self.t(
                    "notifications.generic-created-with-url",
                    name=new_recipe.name,
                    url=urls.recipe_url(self.group.slug, new_recipe.slug, self.settings.BASE_URL),
                ),
            )

        return new_recipe.slug
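
    # Bulk scraping runs as a FastAPI background task; the endpoint returns a report id immediately (202 Accepted).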
    @router.post("/create/url/bulk", status_code=202)
    def parse_recipe_url_bulk(self, bulk: CreateRecipeByUrlBulk, bg_tasks: BackgroundTasks):
        """Takes in a list of URLs and attempts to scrape each one and load it into the database"""
        bulk_scraper = RecipeBulkScraperService(self.service, self.repos, self.group, self.translator)
        report_id = bulk_scraper.get_report_id()
        bg_tasks.add_task(bulk_scraper.scrape, bulk)

        self.publish_event(
            event_type=EventTypes.recipe_created,
            document_data=EventRecipeBulkReportData(operation=EventOperation.create, report_id=report_id),
            group_id=self.group_id,
            household_id=self.household_id,
        )

        return {"reportId": report_id}

    # ==================================================================================================================
    # Other Create Operations
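
    # Creates a recipe from an uploaded zip archive; the upload is processed inside a temporary zip workspace.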
    @router.post("/create/zip", status_code=201)
    def create_recipe_from_zip(self, archive: UploadFile = File(...)):
        """Create recipe from archive"""
        with get_temporary_zip_path() as temp_path:
            recipe = self.service.create_from_zip(archive, temp_path)
            self.publish_event(
                event_type=EventTypes.recipe_created,
                document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=recipe.slug),
                group_id=recipe.group_id,
                household_id=recipe.household_id,
            )

        return recipe.slug
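
    # Requires both OPENAI_ENABLED and OPENAI_ENABLE_IMAGE_SERVICES; otherwise the request is rejected with a 400.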
    @router.post("/create/image", status_code=201)
    async def create_recipe_from_image(
        self,
        images: list[UploadFile] = File(...),
        translate_language: str | None = Query(None, alias="translateLanguage"),
    ):
        """
        Create a recipe from an image using OpenAI.
        Optionally specify a language for it to translate the recipe to.
        """

        if not (self.settings.OPENAI_ENABLED and self.settings.OPENAI_ENABLE_IMAGE_SERVICES):  # coverage: condition always true in tests
            raise HTTPException(
                status_code=400,
                detail=ErrorResponse.respond("OpenAI image services are not enabled"),
            )

        recipe = await self.service.create_from_images(images, translate_language)
        self.publish_event(
            event_type=EventTypes.recipe_created,
            document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=recipe.slug),
            group_id=recipe.group_id,
            household_id=recipe.household_id,
        )

        return recipe.slug

    # ==================================================================================================================
    # CRUD Operations
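
    # Paginated recipe listing for the whole group; the already-serialized orjson payload is returned
    # directly as JSONBytes instead of going through response-model validation.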
    @router.get("", response_model=PaginationBase[RecipeSummary])
    def get_all(
        self,
        request: Request,
        q: PaginationQuery = Depends(make_dependable(PaginationQuery)),
        search_query: RecipeSearchQuery = Depends(make_dependable(RecipeSearchQuery)),
        categories: list[UUID4 | str] | None = Query(None),
        tags: list[UUID4 | str] | None = Query(None),
        tools: list[UUID4 | str] | None = Query(None),
        foods: list[UUID4 | str] | None = Query(None),
        households: list[UUID4 | str] | None = Query(None),
    ):
        cookbook_data: ReadCookBook | None = None
        if search_query.cookbook:  # coverage: condition never true in tests
            if isinstance(search_query.cookbook, UUID):
                cb_match_attr = "id"
            else:
                try:
                    UUID(search_query.cookbook)
                    cb_match_attr = "id"
                except ValueError:
                    cb_match_attr = "slug"
            cookbook_data = self.group_cookbooks.get_one(search_query.cookbook, cb_match_attr)

            if cookbook_data is None:
                raise HTTPException(status_code=404, detail="cookbook not found")

        # We use "group_recipes" here so we can return all recipes regardless of household. The query filter can
        # include a household_id to filter by household.
        # We use "by_user" so we can sort favorites and other user-specific data correctly.
        pagination_response = self.group_recipes.by_user(self.user.id).page_all(
            pagination=q,
            cookbook=cookbook_data,
            categories=categories,
            tags=tags,
            tools=tools,
            foods=foods,
            households=households,
            require_all_categories=search_query.require_all_categories,
            require_all_tags=search_query.require_all_tags,
            require_all_tools=search_query.require_all_tools,
            require_all_foods=search_query.require_all_foods,
            search=search_query.search,
        )

        # merge default pagination with the request's query params
        query_params = q.model_dump() | {**request.query_params}
        pagination_response.set_pagination_guides(
            router.url_path_for("get_all"),
            {k: v for k, v in query_params.items() if v is not None},
        )

        json_compatible_response = orjson.dumps(pagination_response.model_dump(by_alias=True))

        # Response is returned directly, to avoid validation and improve performance
        return JSONBytes(content=json_compatible_response)
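
    # Suggestions use a repository created with household_id=None, so recipes from any household in the group
    # are considered.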
    @router.get("/suggestions", response_model=RecipeSuggestionResponse)
    def suggest_recipes(
        self,
        q: RecipeSuggestionQuery = Depends(make_dependable(RecipeSuggestionQuery)),
        foods: list[UUID4] | None = Query(None),
        tools: list[UUID4] | None = Query(None),
    ) -> RecipeSuggestionResponse:
        group_recipes_by_user = get_repositories(
            self.session, group_id=self.group_id, household_id=None
        ).recipes.by_user(self.user.id)

        recipes = group_recipes_by_user.find_suggested_recipes(q, foods, tools)
        response = RecipeSuggestionResponse(items=recipes)
        json_compatible_response = orjson.dumps(response.model_dump(by_alias=True))

        # Response is returned directly, to avoid validation and improve performance
        return JSONBytes(content=json_compatible_response)

    @router.get("/{slug}", response_model=Recipe)
    def get_one(self, slug: str = Path(..., description="A recipe's slug or id")):
        """Takes in a recipe's slug or id and returns all data for a recipe"""
        try:
            recipe = self.service.get_one(slug)
        except Exception as e:
            self.handle_exceptions(e)
            return None

        return recipe

    @router.post("", status_code=201, response_model=str)
    def create_one(self, data: CreateRecipe) -> str | None:
        """Takes in a JSON string and loads data into the database as a new entry"""
        try:
            new_recipe = self.service.create_one(data)
        except Exception as e:
            self.handle_exceptions(e)
            return None

        if new_recipe:
            self.publish_event(
                event_type=EventTypes.recipe_created,
                document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=new_recipe.slug),
                group_id=new_recipe.group_id,
                household_id=new_recipe.household_id,
                message=self.t(
                    "notifications.generic-created-with-url",
                    name=new_recipe.name,
                    url=urls.recipe_url(self.group.slug, new_recipe.slug, self.settings.BASE_URL),
                ),
            )

        return new_recipe.slug

    @router.post("/{slug}/duplicate", status_code=201, response_model=Recipe)
    def duplicate_one(self, slug: str, req: RecipeDuplicate) -> Recipe:
        """Duplicates a recipe with a new custom name if given"""
        try:
            new_recipe = self.service.duplicate_one(slug, req)
        except Exception as e:
            self.handle_exceptions(e)

        if new_recipe:
            self.publish_event(
                event_type=EventTypes.recipe_created,
                document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=new_recipe.slug),
                group_id=new_recipe.group_id,
                household_id=new_recipe.household_id,
                message=self.t(
                    "notifications.generic-duplicated",
                    name=new_recipe.name,
                ),
            )

        return new_recipe

    @router.put("/{slug}")
    def update_one(self, slug: str, data: Recipe):
        """Updates a recipe by existing slug and data."""
        try:
            recipe = self.service.update_one(slug, data)
        except Exception as e:
            self.handle_exceptions(e)

        if recipe:
            self.publish_event(
                event_type=EventTypes.recipe_updated,
                document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
                group_id=recipe.group_id,
                household_id=recipe.household_id,
                message=self.t(
                    "notifications.generic-updated-with-url",
                    name=recipe.name,
                    url=urls.recipe_url(self.group.slug, recipe.slug, self.settings.BASE_URL),
                ),
            )

        return recipe
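
    # Bulk update: recipes are grouped by (group_id, household_id) so one bulk event is published per household.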
    @router.put("")
    def update_many(self, data: list[Recipe]):
        updated_by_group_and_household: defaultdict[UUID4, defaultdict[UUID4, list[Recipe]]] = defaultdict(
            lambda: defaultdict(list)
        )
        for recipe in data:
            r = self.service.update_one(recipe.id, recipe)  # type: ignore
            updated_by_group_and_household[r.group_id][r.household_id].append(r)

        all_updated: list[Recipe] = []
        if updated_by_group_and_household:  # coverage: condition never true in tests
            for group_id, household_dict in updated_by_group_and_household.items():
                for household_id, updated_recipes in household_dict.items():
                    all_updated.extend(updated_recipes)
                    self.publish_event(
                        event_type=EventTypes.recipe_updated,
                        document_data=EventRecipeBulkData(
                            operation=EventOperation.update, recipe_slugs=[r.slug for r in updated_recipes]
                        ),
                        group_id=group_id,
                        household_id=household_id,
                    )

        return all_updated

    @router.patch("/{slug}")
    def patch_one(self, slug: str, data: Recipe):
        """Partially updates a recipe by existing slug and data."""
        try:
            recipe = self.service.patch_one(slug, data)
        except Exception as e:
            self.handle_exceptions(e)

        if recipe:
            self.publish_event(
                event_type=EventTypes.recipe_updated,
                document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
                group_id=recipe.group_id,
                household_id=recipe.household_id,
                message=self.t(
                    "notifications.generic-updated-with-url",
                    name=recipe.name,
                    url=urls.recipe_url(self.group.slug, recipe.slug, self.settings.BASE_URL),
                ),
            )

        return recipe

    @router.patch("")
    def patch_many(self, data: list[Recipe]):
        updated_by_group_and_household: defaultdict[UUID4, defaultdict[UUID4, list[Recipe]]] = defaultdict(
            lambda: defaultdict(list)
        )
        for recipe in data:
            r = self.service.patch_one(recipe.id, recipe)  # type: ignore
            updated_by_group_and_household[r.group_id][r.household_id].append(r)

        all_updated: list[Recipe] = []
        if updated_by_group_and_household:  # coverage: condition never true in tests
            for group_id, household_dict in updated_by_group_and_household.items():
                for household_id, updated_recipes in household_dict.items():
                    all_updated.extend(updated_recipes)
                    self.publish_event(
                        event_type=EventTypes.recipe_updated,
                        document_data=EventRecipeBulkData(
                            operation=EventOperation.update, recipe_slugs=[r.slug for r in updated_recipes]
                        ),
                        group_id=group_id,
                        household_id=household_id,
                    )

        return all_updated

    @router.patch("/{slug}/last-made")
    def update_last_made(self, slug: str, data: RecipeLastMade):
        """Update a recipe's last made timestamp"""

        try:
            recipe = self.service.update_last_made(slug, data.timestamp)
        except Exception as e:
            self.handle_exceptions(e)

        if recipe:
            self.publish_event(
                event_type=EventTypes.recipe_updated,
                document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
                group_id=recipe.group_id,
                household_id=recipe.household_id,
                message=self.t(
                    "notifications.generic-updated-with-url",
                    name=recipe.name,
                    url=urls.recipe_url(self.group.slug, recipe.slug, self.settings.BASE_URL),
                ),
            )

        return recipe

    @router.delete("/{slug}")
    def delete_one(self, slug: str):
        """Deletes a recipe by slug"""
        try:
            recipe = self.service.delete_one(slug)
        except Exception as e:
            self.handle_exceptions(e)

        if recipe:
            self.publish_event(
                event_type=EventTypes.recipe_deleted,
                document_data=EventRecipeData(operation=EventOperation.delete, recipe_slug=recipe.slug),
                group_id=recipe.group_id,
                household_id=recipe.household_id,
                message=self.t("notifications.generic-deleted", name=recipe.name),
            )

        return recipe

    # ==================================================================================================================
    # Image and Assets
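
    # Fetches the image at the given URL for the recipe, then assigns a fresh cache key for the stored image.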
    @router.post("/{slug}/image", tags=["Recipe: Images and Assets"])
    async def scrape_image_url(self, slug: str, url: ScrapeRecipe):
        recipe = self.mixins.get_one(slug)
        data_service = RecipeDataService(recipe.id)

        try:
            await data_service.scrape_image(url.url)
        except NotAnImageError as e:
            raise HTTPException(
                status_code=400,
                detail=ErrorResponse.respond("Url is not an image"),
            ) from e
        except InvalidDomainError as e:
            raise HTTPException(
                status_code=400,
                detail=ErrorResponse.respond("Url is not from an allowed domain"),
            ) from e

        recipe.image = cache.cache_key.new_key()
        self.service.update_one(recipe.slug, recipe)

    @router.put("/{slug}/image", response_model=UpdateImageResponse, tags=["Recipe: Images and Assets"])
    def update_recipe_image(self, slug: str, image: bytes = File(...), extension: str = Form(...)):
        try:
            new_version = self.service.update_recipe_image(slug, image, extension)
            return UpdateImageResponse(image=new_version)
        except Exception as e:
            self.handle_exceptions(e)
            return None
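
    # Asset uploads are written under the recipe's asset directory; the parent-directory check guards against
    # path traversal via a crafted file name or extension.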
    @router.post("/{slug}/assets", response_model=RecipeAsset, tags=["Recipe: Images and Assets"])
    def upload_recipe_asset(
        self,
        slug: str,
        name: str = Form(...),
        icon: str = Form(...),
        extension: str = Form(...),
        file: UploadFile = File(...),
    ):
        """Upload a file to store as a recipe asset"""
        if "." in extension:  # coverage: condition never true in tests
            extension = extension.split(".")[-1]

        file_slug = slugify(name)
        if not extension or not file_slug:  # coverage: condition always true in tests
            raise HTTPException(status_code=400, detail="Missing required fields")

        file_name = f"{file_slug}.{extension}"
        asset_in = RecipeAsset(name=name, icon=icon, file_name=file_name)

        recipe = self.service.get_one(slug)

        dest = recipe.asset_dir / file_name

        # Ensure path is relative to the recipe's asset directory
        if dest.absolute().parent != recipe.asset_dir:
            raise HTTPException(
                status_code=400,
                detail=f"File name {file_name} or extension {extension} not valid",
            )

        with dest.open("wb") as buffer:
            copyfileobj(file.file, buffer)

        if not dest.is_file():
            raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)

        if recipe.assets is not None:
            recipe.assets.append(asset_in)

        self.service.update_one(slug, recipe)

        return asset_in