Coverage for opt/mealie/lib/python3.12/site-packages/mealie/routes/recipe/recipe_crud_routes.py: 66%

265 statements  

coverage.py v7.10.6, created at 2025-12-05 15:32 +0000

  1  from collections import defaultdict
  2  from shutil import copyfileobj
  3  from uuid import UUID
  4
  5  import orjson
  6  import sqlalchemy
  7  import sqlalchemy.exc
  8  from fastapi import (
  9      BackgroundTasks,
 10      Depends,
 11      File,
 12      Form,
 13      HTTPException,
 14      Path,
 15      Query,
 16      Request,
 17      status,
 18  )
 19  from fastapi.datastructures import UploadFile
 20  from pydantic import UUID4
 21  from slugify import slugify
 22
 23  from mealie.core import exceptions
 24  from mealie.core.dependencies import (
 25      get_temporary_zip_path,
 26  )
 27  from mealie.pkgs import cache
 28  from mealie.repos.all_repositories import get_repositories
 29  from mealie.routes._base import controller
 30  from mealie.routes._base.routers import MealieCrudRoute, UserAPIRouter
 31  from mealie.schema.cookbook.cookbook import ReadCookBook
 32  from mealie.schema.make_dependable import make_dependable
 33  from mealie.schema.recipe import Recipe, ScrapeRecipe, ScrapeRecipeData
 34  from mealie.schema.recipe.recipe import (
 35      CreateRecipe,
 36      CreateRecipeByUrlBulk,
 37      RecipeLastMade,
 38      RecipeSummary,
 39  )
 40  from mealie.schema.recipe.recipe_asset import RecipeAsset
 41  from mealie.schema.recipe.recipe_scraper import ScrapeRecipeTest
 42  from mealie.schema.recipe.recipe_suggestion import RecipeSuggestionQuery, RecipeSuggestionResponse
 43  from mealie.schema.recipe.request_helpers import (
 44      RecipeDuplicate,
 45      UpdateImageResponse,
 46  )
 47  from mealie.schema.response import PaginationBase, PaginationQuery
 48  from mealie.schema.response.pagination import RecipeSearchQuery
 49  from mealie.schema.response.responses import ErrorResponse
 50  from mealie.services import urls
 51  from mealie.services.event_bus_service.event_types import (
 52      EventOperation,
 53      EventRecipeBulkData,
 54      EventRecipeBulkReportData,
 55      EventRecipeData,
 56      EventTypes,
 57  )
 58  from mealie.services.recipe.recipe_data_service import (
 59      InvalidDomainError,
 60      NotAnImageError,
 61      RecipeDataService,
 62  )
 63  from mealie.services.scraper.recipe_bulk_scraper import RecipeBulkScraperService
 64  from mealie.services.scraper.scraped_extras import ScraperContext
 65  from mealie.services.scraper.scraper import create_from_html
 66  from mealie.services.scraper.scraper_strategies import (
 67      ForceTimeoutException,
 68      RecipeScraperOpenAI,
 69      RecipeScraperPackage,
 70  )
 71
 72  from ._base import BaseRecipeController, JSONBytes
 73
 74  router = UserAPIRouter(prefix="/recipes", route_class=MealieCrudRoute)
 75
 76
 77  @controller(router)
 78  class RecipeController(BaseRecipeController):
 79      def handle_exceptions(self, ex: Exception) -> None:
 80          thrownType = type(ex)
 81
 82          if thrownType == exceptions.PermissionDenied:  (line 82 didn't jump to line 83 because the condition on line 82 was never true)
 83              self.logger.error("Permission Denied on recipe controller action")
 84              raise HTTPException(
 85                  status_code=status.HTTP_403_FORBIDDEN, detail=ErrorResponse.respond(message="Permission Denied")
 86              )
 87          elif thrownType == exceptions.NoEntryFound:
 88              self.logger.error("No Entry Found on recipe controller action")
 89              raise HTTPException(
 90                  status_code=status.HTTP_404_NOT_FOUND, detail=ErrorResponse.respond(message="No Entry Found")
 91              )
 92          elif thrownType == sqlalchemy.exc.IntegrityError:
 93              self.logger.error("SQL Integrity Error on recipe controller action")
 94              raise HTTPException(
 95                  status_code=status.HTTP_400_BAD_REQUEST, detail=ErrorResponse.respond(message="Recipe already exists")
 96              )
 97          elif thrownType == exceptions.SlugError:
 98              self.logger.error("Failed to generate a valid slug from recipe name")
 99              raise HTTPException(
100                  status_code=status.HTTP_400_BAD_REQUEST,
101                  detail=ErrorResponse.respond(message="Unable to generate recipe slug"),
102              )
103          else:
104              self.logger.error("Unknown Error on recipe controller action")
105              self.logger.exception(ex)
106              raise HTTPException(
107                  status_code=500, detail=ErrorResponse.respond(message="Unknown Error", exception=ex.__class__.__name__)
108              )
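     # Illustrative summary of the mapping above: service-layer errors are translated into
     # HTTP responses roughly as follows.
     #   exceptions.PermissionDenied   -> 403 "Permission Denied"
     #   exceptions.NoEntryFound       -> 404 "No Entry Found"
     #   sqlalchemy.exc.IntegrityError -> 400 "Recipe already exists"
     #   exceptions.SlugError          -> 400 "Unable to generate recipe slug"
     #   anything else                 -> 500 "Unknown Error" (with the exception class name attached)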

109
110      # =======================================================================
111      # URL Scraping Operations
112
113      @router.post("/test-scrape-url")
114      async def test_parse_recipe_url(self, data: ScrapeRecipeTest):
115          # Debugger should produce the same result as the scraper sees before cleaning
116          ScraperClass = RecipeScraperOpenAI if data.use_openai else RecipeScraperPackage
117          try:
118              if scraped_data := await ScraperClass(data.url, self.translator).scrape_url():  (line 118 didn't jump to line 119 because the condition on line 118 was never true)
119                  return scraped_data.schema.data
120          except ForceTimeoutException as e:
121              raise HTTPException(
122                  status_code=408, detail=ErrorResponse.respond(message="Recipe Scraping Timed Out")
123              ) from e
124
125          return "recipe_scrapers was unable to scrape this URL"
126
127      @router.post("/create/html-or-json", status_code=201)
128      async def create_recipe_from_html_or_json(self, req: ScrapeRecipeData):
129          """Takes in raw HTML or a https://schema.org/Recipe object as a JSON string and parses it like a URL"""
130
131          if req.data.startswith("{"):  (line 131 didn't jump to line 132 because the condition on line 131 was never true)
132              req.data = RecipeScraperPackage.ld_json_to_html(req.data)
133
134          return await self._create_recipe_from_web(req)
135
136      @router.post("/create/url", status_code=201, response_model=str)
137      async def parse_recipe_url(self, req: ScrapeRecipe):
138          """Takes in a URL and attempts to scrape data and load it into the database"""
139
140          return await self._create_recipe_from_web(req)
141
142      async def _create_recipe_from_web(self, req: ScrapeRecipe | ScrapeRecipeData):
143          if isinstance(req, ScrapeRecipeData):
144              html = req.data
145              url = ""
146          else:
147              html = None
148              url = req.url
149
150          try:
151              recipe, extras = await create_from_html(url, self.translator, html)
152          except ForceTimeoutException as e:
153              raise HTTPException(
154                  status_code=408, detail=ErrorResponse.respond(message="Recipe Scraping Timed Out")
155              ) from e
156
157          if req.include_tags:
158              ctx = ScraperContext(self.repos)
159
160              recipe.tags = extras.use_tags(ctx)  # type: ignore
161
162          new_recipe = self.service.create_one(recipe)
163
164          if new_recipe:
165              self.publish_event(
166                  event_type=EventTypes.recipe_created,
167                  document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=new_recipe.slug),
168                  group_id=new_recipe.group_id,
169                  household_id=new_recipe.household_id,
170                  message=self.t(
171                      "notifications.generic-created-with-url",
172                      name=new_recipe.name,
173                      url=urls.recipe_url(self.group.slug, new_recipe.slug, self.settings.BASE_URL),
174                  ),
175              )
176
177          return new_recipe.slug
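     # Illustrative request shapes for the two creation endpoints above (hypothetical values):
     #   ScrapeRecipe(url="https://example.com/some-recipe", include_tags=True)
     #   ScrapeRecipeData(data="<html>...</html>", include_tags=False)
     # When ScrapeRecipeData.data starts with "{", it is treated as a schema.org/Recipe JSON object
     # and converted to HTML via RecipeScraperPackage.ld_json_to_html before parsing.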

178
179      @router.post("/create/url/bulk", status_code=202)
180      def parse_recipe_url_bulk(self, bulk: CreateRecipeByUrlBulk, bg_tasks: BackgroundTasks):
181          """Takes in a URL and attempts to scrape data and load it into the database"""
182          bulk_scraper = RecipeBulkScraperService(self.service, self.repos, self.group, self.translator)
183          report_id = bulk_scraper.get_report_id()
184          bg_tasks.add_task(bulk_scraper.scrape, bulk)
185
186          self.publish_event(
187              event_type=EventTypes.recipe_created,
188              document_data=EventRecipeBulkReportData(operation=EventOperation.create, report_id=report_id),
189              group_id=self.group_id,
190              household_id=self.household_id,
191          )
192
193          return {"reportId": report_id}
194
195      # ==================================================================================================================
196      # Other Create Operations
197
198      @router.post("/create/zip", status_code=201)
199      def create_recipe_from_zip(self, archive: UploadFile = File(...)):
200          """Create recipe from archive"""
201          with get_temporary_zip_path() as temp_path:
202              recipe = self.service.create_from_zip(archive, temp_path)
203              self.publish_event(
204                  event_type=EventTypes.recipe_created,
205                  document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=recipe.slug),
206                  group_id=recipe.group_id,
207                  household_id=recipe.household_id,
208              )
209
210          return recipe.slug
211
212      @router.post("/create/image", status_code=201)
213      async def create_recipe_from_image(
214          self,
215          images: list[UploadFile] = File(...),
216          translate_language: str | None = Query(None, alias="translateLanguage"),
217      ):
218          """
219          Create a recipe from an image using OpenAI.
220          Optionally specify a language for it to translate the recipe to.
221          """
222
223          if not (self.settings.OPENAI_ENABLED and self.settings.OPENAI_ENABLE_IMAGE_SERVICES):  (line 223 didn't jump to line 229 because the condition on line 223 was always true)
224              raise HTTPException(
225                  status_code=400,
226                  detail=ErrorResponse.respond("OpenAI image services are not enabled"),
227              )
228
229          recipe = await self.service.create_from_images(images, translate_language)
230          self.publish_event(
231              event_type=EventTypes.recipe_created,
232              document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=recipe.slug),
233              group_id=recipe.group_id,
234              household_id=recipe.household_id,
235          )
236
237          return recipe.slug
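     # Hypothetical call to the endpoint above (assuming Mealie's usual /api mount and an auth token):
     #   POST /api/recipes/create/image?translateLanguage=en  with one or more multipart "images" file parts
     # Requires both OPENAI_ENABLED and OPENAI_ENABLE_IMAGE_SERVICES; otherwise a 400 is returned.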

238
239      # ==================================================================================================================
240      # CRUD Operations
241
242      @router.get("", response_model=PaginationBase[RecipeSummary])
243      def get_all(
244          self,
245          request: Request,
246          q: PaginationQuery = Depends(make_dependable(PaginationQuery)),
247          search_query: RecipeSearchQuery = Depends(make_dependable(RecipeSearchQuery)),
248          categories: list[UUID4 | str] | None = Query(None),
249          tags: list[UUID4 | str] | None = Query(None),
250          tools: list[UUID4 | str] | None = Query(None),
251          foods: list[UUID4 | str] | None = Query(None),
252          households: list[UUID4 | str] | None = Query(None),
253      ):
254          cookbook_data: ReadCookBook | None = None
255          if search_query.cookbook:
256              if isinstance(search_query.cookbook, UUID):  (line 256 didn't jump to line 257 because the condition on line 256 was never true)
257                  cb_match_attr = "id"
258              else:
259                  try:
260                      UUID(search_query.cookbook)
261                      cb_match_attr = "id"
262                  except ValueError:
263                      cb_match_attr = "slug"
264              cookbook_data = self.group_cookbooks.get_one(search_query.cookbook, cb_match_attr)
265
266              if cookbook_data is None:  (line 266 didn't jump to line 272 because the condition on line 266 was always true)
267                  raise HTTPException(status_code=404, detail="cookbook not found")
268
269          # We use "group_recipes" here so we can return all recipes regardless of household. The query filter can
270          # include a household_id to filter by household.
271          # We use "by_user" so we can sort favorites and other user-specific data correctly.
272          pagination_response = self.group_recipes.by_user(self.user.id).page_all(
273              pagination=q,
274              cookbook=cookbook_data,
275              categories=categories,
276              tags=tags,
277              tools=tools,
278              foods=foods,
279              households=households,
280              require_all_categories=search_query.require_all_categories,
281              require_all_tags=search_query.require_all_tags,
282              require_all_tools=search_query.require_all_tools,
283              require_all_foods=search_query.require_all_foods,
284              search=search_query.search,
285          )
286
287          # merge default pagination with the request's query params
288          query_params = q.model_dump() | {**request.query_params}
289          pagination_response.set_pagination_guides(
290              router.url_path_for("get_all"),
291              {k: v for k, v in query_params.items() if v is not None},
292          )
293
294          json_compatible_response = orjson.dumps(pagination_response.model_dump(by_alias=True))
295
296          # Response is returned directly, to avoid validation and improve performance
297          return JSONBytes(content=json_compatible_response)
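     # Hypothetical filtered listing against this endpoint (assuming the router is mounted under /api
     # and the usual camelCase aliases on the pagination/search models):
     #   GET /api/recipes?page=1&perPage=20&search=soup&tags=dinner&requireAllTags=true&cookbook=sunday-meals
     # The cookbook value may be a UUID or a slug; the response is serialized with orjson and returned
     # as raw bytes to skip response-model validation.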

298
299      @router.get("/suggestions", response_model=RecipeSuggestionResponse)
300      def suggest_recipes(
301          self,
302          q: RecipeSuggestionQuery = Depends(make_dependable(RecipeSuggestionQuery)),
303          foods: list[UUID4] | None = Query(None),
304          tools: list[UUID4] | None = Query(None),
305      ) -> RecipeSuggestionResponse:
306          group_recipes_by_user = get_repositories(
307              self.session, group_id=self.group_id, household_id=None
308          ).recipes.by_user(self.user.id)
309
310          recipes = group_recipes_by_user.find_suggested_recipes(q, foods, tools)
311          response = RecipeSuggestionResponse(items=recipes)
312          json_compatible_response = orjson.dumps(response.model_dump(by_alias=True))
313
314          # Response is returned directly, to avoid validation and improve performance
315          return JSONBytes(content=json_compatible_response)
316
317      @router.get("/{slug}", response_model=Recipe)
318      def get_one(self, slug: str = Path(..., description="A recipe's slug or id")):
319          """Takes in a recipe's slug or id and returns all data for a recipe"""
320          try:
321              recipe = self.service.get_one(slug)
322          except Exception as e:
323              self.handle_exceptions(e)
324              return None
325
326          return recipe
327
328      @router.post("", status_code=201, response_model=str)
329      def create_one(self, data: CreateRecipe) -> str | None:
330          """Takes in a JSON string and loads data into the database as a new entry"""
331          try:
332              new_recipe = self.service.create_one(data)
333          except Exception as e:
334              self.handle_exceptions(e)
335              return None
336
337          if new_recipe:  (line 337 didn't jump to line 350 because the condition on line 337 was always true)
338              self.publish_event(
339                  event_type=EventTypes.recipe_created,
340                  document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=new_recipe.slug),
341                  group_id=new_recipe.group_id,
342                  household_id=new_recipe.household_id,
343                  message=self.t(
344                      "notifications.generic-created-with-url",
345                      name=new_recipe.name,
346                      url=urls.recipe_url(self.group.slug, new_recipe.slug, self.settings.BASE_URL),
347                  ),
348              )
349
350          return new_recipe.slug
351
352      @router.post("/{slug}/duplicate", status_code=201, response_model=Recipe)
353      def duplicate_one(self, slug: str, req: RecipeDuplicate) -> Recipe:
354          """Duplicates a recipe with a new custom name if given"""
355          try:
356              new_recipe = self.service.duplicate_one(slug, req)
357          except Exception as e:
358              self.handle_exceptions(e)
359
360          if new_recipe:  (line 360 didn't jump to line 372 because the condition on line 360 was always true)
361              self.publish_event(
362                  event_type=EventTypes.recipe_created,
363                  document_data=EventRecipeData(operation=EventOperation.create, recipe_slug=new_recipe.slug),
364                  group_id=new_recipe.group_id,
365                  household_id=new_recipe.household_id,
366                  message=self.t(
367                      "notifications.generic-duplicated",
368                      name=new_recipe.name,
369                  ),
370              )
371
372          return new_recipe
373
374      @router.put("/{slug}")
375      def update_one(self, slug: str, data: Recipe):
376          """Updates a recipe by existing slug and data."""
377          try:
378              recipe = self.service.update_one(slug, data)
379          except Exception as e:
380              self.handle_exceptions(e)
381
382          if recipe:
383              self.publish_event(
384                  event_type=EventTypes.recipe_updated,
385                  document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
386                  group_id=recipe.group_id,
387                  household_id=recipe.household_id,
388                  message=self.t(
389                      "notifications.generic-updated-with-url",
390                      name=recipe.name,
391                      url=urls.recipe_url(self.group.slug, recipe.slug, self.settings.BASE_URL),
392                  ),
393              )
394
395          return recipe
396
397      @router.put("")
398      def update_many(self, data: list[Recipe]):
399          updated_by_group_and_household: defaultdict[UUID4, defaultdict[UUID4, list[Recipe]]] = defaultdict(
400              lambda: defaultdict(list)
401          )
402          for recipe in data:
403              r = self.service.update_one(recipe.id, recipe)  # type: ignore
404              updated_by_group_and_household[r.group_id][r.household_id].append(r)
405
406          all_updated: list[Recipe] = []
407          if updated_by_group_and_household:
408              for group_id, household_dict in updated_by_group_and_household.items():
409                  for household_id, updated_recipes in household_dict.items():
410                      all_updated.extend(updated_recipes)
411                      self.publish_event(
412                          event_type=EventTypes.recipe_updated,
413                          document_data=EventRecipeBulkData(
414                              operation=EventOperation.update, recipe_slugs=[r.slug for r in updated_recipes]
415                          ),
416                          group_id=group_id,
417                          household_id=household_id,
418                      )
419
420          return all_updated
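     # Illustrative behaviour of the grouping above (hypothetical slugs): updating "soup" and "stew"
     # in the same group and household emits one recipe_updated event with recipe_slugs=["soup", "stew"];
     # a recipe belonging to a different household gets its own event.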

421
422      @router.patch("/{slug}")
423      def patch_one(self, slug: str, data: Recipe):
424          """Updates a recipe by existing slug and data."""
425          try:
426              recipe = self.service.patch_one(slug, data)
427          except Exception as e:
428              self.handle_exceptions(e)
429
430          if recipe:
431              self.publish_event(
432                  event_type=EventTypes.recipe_updated,
433                  document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
434                  group_id=recipe.group_id,
435                  household_id=recipe.household_id,
436                  message=self.t(
437                      "notifications.generic-updated-with-url",
438                      name=recipe.name,
439                      url=urls.recipe_url(self.group.slug, recipe.slug, self.settings.BASE_URL),
440                  ),
441              )
442
443          return recipe
444
445      @router.patch("")
446      def patch_many(self, data: list[Recipe]):
447          updated_by_group_and_household: defaultdict[UUID4, defaultdict[UUID4, list[Recipe]]] = defaultdict(
448              lambda: defaultdict(list)
449          )
450          for recipe in data:
451              r = self.service.patch_one(recipe.id, recipe)  # type: ignore
452              updated_by_group_and_household[r.group_id][r.household_id].append(r)
453
454          all_updated: list[Recipe] = []
455          if updated_by_group_and_household:  (line 455 didn't jump to line 456 because the condition on line 455 was never true)
456              for group_id, household_dict in updated_by_group_and_household.items():
457                  for household_id, updated_recipes in household_dict.items():
458                      all_updated.extend(updated_recipes)
459                      self.publish_event(
460                          event_type=EventTypes.recipe_updated,
461                          document_data=EventRecipeBulkData(
462                              operation=EventOperation.update, recipe_slugs=[r.slug for r in updated_recipes]
463                          ),
464                          group_id=group_id,
465                          household_id=household_id,
466                      )
467
468          return all_updated
469
470      @router.patch("/{slug}/last-made")
471      def update_last_made(self, slug: str, data: RecipeLastMade):
472          """Update a recipe's last made timestamp"""
473
474          try:
475              recipe = self.service.update_last_made(slug, data.timestamp)
476          except Exception as e:
477              self.handle_exceptions(e)
478
479          if recipe:
480              self.publish_event(
481                  event_type=EventTypes.recipe_updated,
482                  document_data=EventRecipeData(operation=EventOperation.update, recipe_slug=recipe.slug),
483                  group_id=recipe.group_id,
484                  household_id=recipe.household_id,
485                  message=self.t(
486                      "notifications.generic-updated-with-url",
487                      name=recipe.name,
488                      url=urls.recipe_url(self.group.slug, recipe.slug, self.settings.BASE_URL),
489                  ),
490              )
491
492          return recipe
493
494      @router.delete("/{slug}")
495      def delete_one(self, slug: str):
496          """Deletes a recipe by slug"""
497          try:
498              recipe = self.service.delete_one(slug)
499          except Exception as e:
500              self.handle_exceptions(e)
501
502          if recipe:
503              self.publish_event(
504                  event_type=EventTypes.recipe_deleted,
505                  document_data=EventRecipeData(operation=EventOperation.delete, recipe_slug=recipe.slug),
506                  group_id=recipe.group_id,
507                  household_id=recipe.household_id,
508                  message=self.t("notifications.generic-deleted", name=recipe.name),
509              )
510
511          return recipe
512
513      # ==================================================================================================================
514      # Image and Assets
515
516      @router.post("/{slug}/image", tags=["Recipe: Images and Assets"])
517      async def scrape_image_url(self, slug: str, url: ScrapeRecipe):
518          recipe = self.mixins.get_one(slug)
519          data_service = RecipeDataService(recipe.id)
520
521          try:
522              await data_service.scrape_image(url.url)
523          except NotAnImageError as e:
524              raise HTTPException(
525                  status_code=400,
526                  detail=ErrorResponse.respond("Url is not an image"),
527              ) from e
528          except InvalidDomainError as e:
529              raise HTTPException(
530                  status_code=400,
531                  detail=ErrorResponse.respond("Url is not from an allowed domain"),
532              ) from e
533
534          recipe.image = cache.cache_key.new_key()
535          self.service.update_one(recipe.slug, recipe)
536
537      @router.put("/{slug}/image", response_model=UpdateImageResponse, tags=["Recipe: Images and Assets"])
538      def update_recipe_image(self, slug: str, image: bytes = File(...), extension: str = Form(...)):
539          try:
540              new_version = self.service.update_recipe_image(slug, image, extension)
541              return UpdateImageResponse(image=new_version)
542          except Exception as e:
543              self.handle_exceptions(e)
544              return None
545
546      @router.post("/{slug}/assets", response_model=RecipeAsset, tags=["Recipe: Images and Assets"])
547      def upload_recipe_asset(
548          self,
549          slug: str,
550          name: str = Form(...),
551          icon: str = Form(...),
552          extension: str = Form(...),
553          file: UploadFile = File(...),
554      ):
555          """Upload a file to store as a recipe asset"""
556          if "." in extension:
557              extension = extension.split(".")[-1]
558
559          file_slug = slugify(name)
560          if not extension or not file_slug:
561              raise HTTPException(status_code=400, detail="Missing required fields")
562
563          file_name = f"{file_slug}.{extension}"
564          asset_in = RecipeAsset(name=name, icon=icon, file_name=file_name)
565
566          recipe = self.service.get_one(slug)
567
568          dest = recipe.asset_dir / file_name
569
570          # Ensure path is relative to the recipe's asset directory
571          if dest.absolute().parent != recipe.asset_dir:  (line 571 didn't jump to line 572 because the condition on line 571 was never true)
572              raise HTTPException(
573                  status_code=400,
574                  detail=f"File name {file_name} or extension {extension} not valid",
575              )
576
577          with dest.open("wb") as buffer:
578              copyfileobj(file.file, buffer)
579
580          if not dest.is_file():  (line 580 didn't jump to line 581 because the condition on line 580 was never true)
581              raise HTTPException(status.HTTP_500_INTERNAL_SERVER_ERROR)
582
583          if recipe.assets is not None:  (line 583 didn't jump to line 586 because the condition on line 583 was always true)
584              recipe.assets.append(asset_in)
585
586          self.service.update_one(slug, recipe)
587
588          return asset_in
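For orientation, a minimal sketch of exercising two of these routes with FastAPI's TestClient. The import path mealie.app, the /api prefix, the camelCase request aliases, and AUTH_TOKEN are assumptions rather than facts taken from this file; authentication is required because the routes are registered on a UserAPIRouter.

from fastapi.testclient import TestClient

from mealie.app import app  # assumed import path for the application instance

AUTH_TOKEN = "..."  # placeholder: obtain a real bearer token via the auth endpoints
client = TestClient(app)
headers = {"Authorization": f"Bearer {AUTH_TOKEN}"}

# Paginated, filtered listing (handled by RecipeController.get_all)
resp = client.get(
    "/api/recipes",
    params={"page": 1, "perPage": 10, "search": "soup"},
    headers=headers,
)
print(resp.status_code, resp.json().get("total"))

# Scrape-and-create from a URL (handled by RecipeController.parse_recipe_url)
resp = client.post(
    "/api/recipes/create/url",
    json={"url": "https://example.com/some-recipe", "includeTags": True},
    headers=headers,
)
print(resp.status_code, resp.json())  # expect 201 and the new recipe's slug on success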