From 2a80251dc35d97d5417608eb31c58220f90ceb63 Mon Sep 17 00:00:00 2001
From: Mert <101130780+mertalev@users.noreply.github.com>
Date: Tue, 6 May 2025 14:18:22 -0400
Subject: [PATCH] fix(server): more robust person thumbnail generation (#17974)

* more robust person thumbnail generation

* clamp bounding boxes

* update sql

* no need to process invalid images after decoding

* cursed knowledge

* new line
---
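Person thumbnail crops are now computed against the dimensions reported by the image
decoder (the info/OutputInfo returned by decodeImage) rather than the EXIF width/height,
which can disagree with the actual pixel data; face bounding boxes are clamped to the
source dimensions before scaling, and the thumbnail is generated from the already-decoded
buffer. A minimal standalone sketch of the crop math, using illustrative names (Box, Dims,
faceCrop) that are not identifiers from this patch:

// Sketch only: mirrors the clamp + scale + square-crop logic added to MediaService.getCrop below.
interface Box { x1: number; y1: number; x2: number; y2: number }
interface Dims { width: number; height: number }

const clamp = (value: number, min: number, max: number) => Math.max(min, Math.min(max, value));

function faceCrop(detectedAt: Dims, decoded: Dims, box: Box) {
  // face bounding boxes can spill outside the image they were detected on
  const x1 = clamp(box.x1, 0, detectedAt.width);
  const y1 = clamp(box.y1, 0, detectedAt.height);
  const x2 = clamp(box.x2, 0, detectedAt.width);
  const y2 = clamp(box.y2, 0, detectedAt.height);

  // scale into the coordinate space of the image actually being cropped
  const sx = decoded.width / detectedAt.width;
  const sy = decoded.height / detectedAt.height;
  const halfWidth = (sx * (x2 - x1)) / 2;
  const halfHeight = (sy * (y2 - y1)) / 2;
  const middleX = Math.round(sx * x1 + halfWidth);
  const middleY = Math.round(sy * y1 + halfHeight);

  // square crop, zoomed out ~10%, shrunk so it never leaves the image
  const target = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
  const half = Math.min(
    middleX - Math.max(0, middleX - target),
    middleY - Math.max(0, middleY - target),
    Math.min(decoded.width - 1, middleX + target) - middleX,
    Math.min(decoded.height - 1, middleY + target) - middleY,
  );
  return { left: middleX - half, top: middleY - half, width: half * 2, height: half * 2 };
}

// A face box hanging off the edge of a 2162x1440 detection frame, cropped from the
// 4624x3080 decoded original (the "negative coordinates" case in the new spec):
// faceCrop({ width: 2162, height: 1440 }, { width: 4624, height: 3080 },
//          { x1: -176, y1: -230, x2: 193, y2: 251 })
//   => { left: 0, top: 62, width: 412, height: 412 }
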
 docs/src/pages/cursed-knowledge.tsx          |  13 +
 server/src/queries/person.repository.sql     |  11 +-
 server/src/repositories/person.repository.ts |   9 +-
 server/src/services/media.service.spec.ts    | 321 ++++++++++++++++++-
 server/src/services/media.service.ts         | 101 +++++-
 server/src/services/person.service.spec.ts   | 112 +------
 server/src/services/person.service.ts        |  95 +-----
 server/src/utils/misc.ts                     |   4 +
 server/test/fixtures/person.stub.ts          |  48 ++-
 9 files changed, 491 insertions(+), 223 deletions(-)

diff --git a/docs/src/pages/cursed-knowledge.tsx b/docs/src/pages/cursed-knowledge.tsx
index 1e5c724d16..6a0981a596 100644
--- a/docs/src/pages/cursed-knowledge.tsx
+++ b/docs/src/pages/cursed-knowledge.tsx
@@ -2,6 +2,7 @@ import {
   mdiBug,
   mdiCalendarToday,
   mdiCrosshairsOff,
+  mdiCrop,
   mdiDatabase,
   mdiLeadPencil,
   mdiLockOff,
@@ -22,6 +23,18 @@ const withLanguage = (date: Date) => (language: string) => date.toLocaleDateStri
 type Item = Omit<TimelineItem, 'done' | 'getDateLabel'> & { date: Date };
 
 const items: Item[] = [
+  {
+    icon: mdiCrop,
+    iconColor: 'tomato',
+    title: 'Image dimensions in EXIF metadata are cursed',
+    description:
+      'The dimensions in EXIF metadata can be different from the actual dimensions of the image, causing issues with cropping and resizing.',
+    link: {
+      url: 'https://github.com/immich-app/immich/pull/17974',
+      text: '#17974',
+    },
+    date: new Date(2025, 5, 5),
+  },
   {
     icon: mdiMicrosoftWindows,
     iconColor: '#357EC7',
diff --git a/server/src/queries/person.repository.sql b/server/src/queries/person.repository.sql
index 09517a1e14..c77d9835fa 100644
--- a/server/src/queries/person.repository.sql
+++ b/server/src/queries/person.repository.sql
@@ -143,23 +143,20 @@ select
   "asset_faces"."boundingBoxY2" as "y2",
   "asset_faces"."imageWidth" as "oldWidth",
   "asset_faces"."imageHeight" as "oldHeight",
-  "exif"."exifImageWidth",
-  "exif"."exifImageHeight",
   "assets"."type",
   "assets"."originalPath",
-  "asset_files"."path" as "previewPath"
+  "asset_files"."path" as "previewPath",
+  "exif"."orientation" as "exifOrientation"
 from
   "person"
   inner join "asset_faces" on "asset_faces"."id" = "person"."faceAssetId"
   inner join "assets" on "asset_faces"."assetId" = "assets"."id"
-  inner join "exif" on "exif"."assetId" = "assets"."id"
-  inner join "asset_files" on "asset_files"."assetId" = "assets"."id"
+  left join "exif" on "exif"."assetId" = "assets"."id"
+  left join "asset_files" on "asset_files"."assetId" = "assets"."id"
 where
   "person"."id" = $1
   and "asset_faces"."deletedAt" is null
   and "asset_files"."type" = $2
-  and "exif"."exifImageWidth" > $3
-  and "exif"."exifImageHeight" > $4
 
 -- PersonRepository.reassignFace
 update "asset_faces"
diff --git a/server/src/repositories/person.repository.ts b/server/src/repositories/person.repository.ts
index b55537bdba..789c47ccaf 100644
--- a/server/src/repositories/person.repository.ts
+++ b/server/src/repositories/person.repository.ts
@@ -264,8 +264,8 @@ export class PersonRepository {
       .selectFrom('person')
       .innerJoin('asset_faces', 'asset_faces.id', 'person.faceAssetId')
       .innerJoin('assets', 'asset_faces.assetId', 'assets.id')
-      .innerJoin('exif', 'exif.assetId', 'assets.id')
-      .innerJoin('asset_files', 'asset_files.assetId', 'assets.id')
+      .leftJoin('exif', 'exif.assetId', 'assets.id')
+      .leftJoin('asset_files', 'asset_files.assetId', 'assets.id')
       .select([
         'person.ownerId',
         'asset_faces.boundingBoxX1 as x1',
@@ -274,17 +274,14 @@ export class PersonRepository {
         'asset_faces.boundingBoxY2 as y2',
         'asset_faces.imageWidth as oldWidth',
         'asset_faces.imageHeight as oldHeight',
-        'exif.exifImageWidth',
-        'exif.exifImageHeight',
         'assets.type',
         'assets.originalPath',
         'asset_files.path as previewPath',
+        'exif.orientation as exifOrientation',
       ])
       .where('person.id', '=', id)
       .where('asset_faces.deletedAt', 'is', null)
       .where('asset_files.type', '=', AssetFileType.PREVIEW)
-      .where('exif.exifImageWidth', '>', 0)
-      .where('exif.exifImageHeight', '>', 0)
       .$narrowType<{ exifImageWidth: NotNull; exifImageHeight: NotNull }>()
       .executeTakeFirst();
   }
diff --git a/server/src/services/media.service.spec.ts b/server/src/services/media.service.spec.ts
index d747ee0ba8..fa7cfc096a 100644
--- a/server/src/services/media.service.spec.ts
+++ b/server/src/services/media.service.spec.ts
@@ -7,6 +7,7 @@ import {
   AssetType,
   AudioCodec,
   Colorspace,
+  ExifOrientation,
   ImageFormat,
   JobName,
   JobStatus,
@@ -20,7 +21,8 @@ import { JobCounts, RawImageInfo } from 'src/types';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { faceStub } from 'test/fixtures/face.stub';
 import { probeStub } from 'test/fixtures/media.stub';
-import { personStub } from 'test/fixtures/person.stub';
+import { personStub, personThumbnailStub } from 'test/fixtures/person.stub';
+import { systemConfigStub } from 'test/fixtures/system-config.stub';
 import { makeStream, newTestService, ServiceMocks } from 'test/utils';
 
 describe(MediaService.name, () => {
@@ -872,6 +874,323 @@ describe(MediaService.name, () => {
     });
   });
 
+  describe('handleGeneratePersonThumbnail', () => {
+    it('should skip if machine learning is disabled', async () => {
+      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(JobStatus.SKIPPED);
+      expect(mocks.asset.getByIds).not.toHaveBeenCalled();
+      expect(mocks.systemMetadata.get).toHaveBeenCalled();
+    });
+
+    it('should skip a person not found', async () => {
+      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
+      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
+    });
+
+    it('should skip a person without a face asset id', async () => {
+      mocks.person.getById.mockResolvedValue(personStub.noThumbnail);
+      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
+      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
+    });
+
+    it('should skip a person with face not found', async () => {
+      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
+      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
+    });
+
+    it('should generate a thumbnail', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailMiddle);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 1000, height: 1000 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id);
+      expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/admin_id/pe/rs');
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailMiddle.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 238,
+            top: 163,
+            width: 274,
+            height: 274,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+      expect(mocks.person.update).toHaveBeenCalledWith({
+        id: 'person-1',
+        thumbnailPath: 'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      });
+    });
+
+    it('should generate a thumbnail without going negative', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailStart);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 2160, height: 3840 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailStart.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 0,
+            top: 85,
+            width: 510,
+            height: 510,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should generate a thumbnail without overflowing', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailEnd);
+      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 1000, height: 1000 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.newThumbnailEnd.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 591,
+            top: 591,
+            width: 408,
+            height: 408,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should handle negative coordinates', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.negativeCoordinate);
+      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 4624, height: 3080 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.negativeCoordinate.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 0,
+            top: 62,
+            width: 412,
+            height: 412,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should handle overflowing coordinate', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.overflowingCoordinate);
+      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 4624, height: 3080 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.overflowingCoordinate.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            left: 4485,
+            top: 94,
+            width: 138,
+            height: 138,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should use embedded preview if enabled and raw image', async () => {
+      mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
+      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const extracted = Buffer.from('');
+      const data = Buffer.from('');
+      const info = { width: 2160, height: 3840 } as OutputInfo;
+      mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.JPEG });
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+      mocks.media.getImageDimensions.mockResolvedValue(info);
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(extracted, {
+        colorspace: Colorspace.P3,
+        orientation: ExifOrientation.Horizontal,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
+        data,
+        {
+          colorspace: Colorspace.P3,
+          format: ImageFormat.JPEG,
+          quality: 80,
+          crop: {
+            height: 844,
+            left: 388,
+            top: 730,
+            width: 844,
+          },
+          raw: info,
+          processInvalidImages: false,
+          size: 250,
+        },
+        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
+      );
+    });
+
+    it('should not use embedded preview if enabled and not raw image', async () => {
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailMiddle);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 2160, height: 3840 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.extract).not.toHaveBeenCalled();
+      expect(mocks.media.generateThumbnail).toHaveBeenCalled();
+    });
+
+    it('should not use embedded preview if enabled and raw image if not exists', async () => {
+      mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const data = Buffer.from('');
+      const info = { width: 2160, height: 3840 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalled();
+    });
+
+    it('should not use embedded preview if enabled and raw image if low resolution', async () => {
+      mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
+      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.rawEmbeddedThumbnail);
+      mocks.media.generateThumbnail.mockResolvedValue();
+      const extracted = Buffer.from('');
+      const data = Buffer.from('');
+      const info = { width: 1000, height: 1000 } as OutputInfo;
+      mocks.media.decodeImage.mockResolvedValue({ data, info });
+      mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.JPEG });
+      mocks.media.getImageDimensions.mockResolvedValue(info);
+
+      await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
+        JobStatus.SUCCESS,
+      );
+
+      expect(mocks.media.extract).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath);
+      expect(mocks.media.decodeImage).toHaveBeenCalledWith(personThumbnailStub.rawEmbeddedThumbnail.originalPath, {
+        colorspace: Colorspace.P3,
+        orientation: undefined,
+        processInvalidImages: false,
+      });
+      expect(mocks.media.generateThumbnail).toHaveBeenCalled();
+    });
+  });
+
   describe('handleQueueVideoConversion', () => {
     it('should queue all video assets', async () => {
       mocks.assetJob.streamForVideoConversion.mockReturnValue(makeStream([assetStub.video]));
diff --git a/server/src/services/media.service.ts b/server/src/services/media.service.ts
index adc8c4b904..048d03f493 100644
--- a/server/src/services/media.service.ts
+++ b/server/src/services/media.service.ts
@@ -1,5 +1,5 @@
 import { Injectable } from '@nestjs/common';
-import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
+import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { StorageCore, ThumbnailPathEntity } from 'src/cores/storage.core';
 import { Exif } from 'src/database';
 import { OnEvent, OnJob } from 'src/decorators';
@@ -11,6 +11,7 @@ import {
   AssetVisibility,
   AudioCodec,
   Colorspace,
+  ImageFormat,
   JobName,
   JobStatus,
   LogLevel,
@@ -24,10 +25,13 @@ import {
   VideoContainer,
 } from 'src/enum';
 import { UpsertFileOptions } from 'src/repositories/asset.repository';
+import { BoundingBox } from 'src/repositories/machine-learning.repository';
 import { BaseService } from 'src/services/base.service';
 import {
   AudioStreamInfo,
+  CropOptions,
   DecodeToBufferOptions,
+  ImageDimensions,
   JobItem,
   JobOf,
   VideoFormat,
@@ -37,6 +41,7 @@ import {
 import { getAssetFiles } from 'src/utils/asset.util';
 import { BaseConfig, ThumbnailConfig } from 'src/utils/media';
 import { mimeTypes } from 'src/utils/mime-types';
+import { clamp, isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
 
 @Injectable()
 export class MediaService extends BaseService {
@@ -308,6 +313,100 @@ export class MediaService extends BaseService {
     return { previewPath, thumbnailPath, fullsizePath, thumbhash: outputs[0] as Buffer };
   }
 
+  @OnJob({ name: JobName.GENERATE_PERSON_THUMBNAIL, queue: QueueName.THUMBNAIL_GENERATION })
+  async handleGeneratePersonThumbnail({ id }: JobOf<JobName.GENERATE_PERSON_THUMBNAIL>): Promise<JobStatus> {
+    const { machineLearning, metadata, image } = await this.getConfig({ withCache: true });
+    if (!isFacialRecognitionEnabled(machineLearning) && !isFaceImportEnabled(metadata)) {
+      return JobStatus.SKIPPED;
+    }
+
+    const data = await this.personRepository.getDataForThumbnailGenerationJob(id);
+    if (!data) {
+      this.logger.error(`Could not generate person thumbnail for ${id}: missing data`);
+      return JobStatus.FAILED;
+    }
+
+    const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight, exifOrientation, previewPath, originalPath } = data;
+    let inputImage: string | Buffer;
+    if (mimeTypes.isVideo(originalPath)) {
+      if (!previewPath) {
+        this.logger.error(`Could not generate person thumbnail for video ${id}: missing preview path`);
+        return JobStatus.FAILED;
+      }
+      inputImage = previewPath;
+    } else if (image.extractEmbedded && mimeTypes.isRaw(originalPath)) {
+      // for raw files, prefer the embedded preview when extraction is enabled,
+      // falling back to decoding the raw file itself if extraction fails
+      const extracted = await this.extractImage(originalPath, image.preview.size);
+      inputImage = extracted ? extracted.buffer : originalPath;
+    } else {
+      inputImage = originalPath;
+    }
+
+    const { data: decodedImage, info } = await this.mediaRepository.decodeImage(inputImage, {
+      colorspace: image.colorspace,
+      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
+      // if this is an extracted image, it may not have orientation metadata
+      orientation: Buffer.isBuffer(inputImage) && exifOrientation ? Number(exifOrientation) : undefined,
+    });
+
+    const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
+    this.storageCore.ensureFolders(thumbnailPath);
+
+    const thumbnailOptions = {
+      colorspace: image.colorspace,
+      format: ImageFormat.JPEG,
+      raw: info,
+      quality: image.thumbnail.quality,
+      crop: this.getCrop(
+        { old: { width: oldWidth, height: oldHeight }, new: { width: info.width, height: info.height } },
+        { x1, y1, x2, y2 },
+      ),
+      processInvalidImages: false,
+      size: FACE_THUMBNAIL_SIZE,
+    };
+
+    await this.mediaRepository.generateThumbnail(decodedImage, thumbnailOptions, thumbnailPath);
+    await this.personRepository.update({ id, thumbnailPath });
+
+    return JobStatus.SUCCESS;
+  }
+
+  private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
+    // face bounding boxes can spill outside the image dimensions
+    const clampedX1 = clamp(x1, 0, dims.old.width);
+    const clampedY1 = clamp(y1, 0, dims.old.height);
+    const clampedX2 = clamp(x2, 0, dims.old.width);
+    const clampedY2 = clamp(y2, 0, dims.old.height);
+
+    const widthScale = dims.new.width / dims.old.width;
+    const heightScale = dims.new.height / dims.old.height;
+
+    const halfWidth = (widthScale * (clampedX2 - clampedX1)) / 2;
+    const halfHeight = (heightScale * (clampedY2 - clampedY1)) / 2;
+
+    const middleX = Math.round(widthScale * clampedX1 + halfWidth);
+    const middleY = Math.round(heightScale * clampedY1 + halfHeight);
+
+    // zoom out 10%
+    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
+
+    // get the longest distance from the center of the image without overflowing
+    const newHalfSize = Math.min(
+      middleX - Math.max(0, middleX - targetHalfSize),
+      middleY - Math.max(0, middleY - targetHalfSize),
+      Math.min(dims.new.width - 1, middleX + targetHalfSize) - middleX,
+      Math.min(dims.new.height - 1, middleY + targetHalfSize) - middleY,
+    );
+
+    return {
+      left: middleX - newHalfSize,
+      top: middleY - newHalfSize,
+      width: newHalfSize * 2,
+      height: newHalfSize * 2,
+    };
+  }
+
   private async generateVideoThumbnails(asset: ThumbnailPathEntity & { originalPath: string }) {
     const { image, ffmpeg } = await this.getConfig({ withCache: true });
     const previewPath = StorageCore.getImagePath(asset, AssetPathType.PREVIEW, image.preview.format);
diff --git a/server/src/services/person.service.spec.ts b/server/src/services/person.service.spec.ts
index 5b88883472..52e5ff03ee 100644
--- a/server/src/services/person.service.spec.ts
+++ b/server/src/services/person.service.spec.ts
@@ -1,7 +1,7 @@
 import { BadRequestException, NotFoundException } from '@nestjs/common';
 import { BulkIdErrorReason } from 'src/dtos/asset-ids.response.dto';
 import { mapFaces, mapPerson, PersonResponseDto } from 'src/dtos/person.dto';
-import { CacheControl, Colorspace, ImageFormat, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
+import { CacheControl, JobName, JobStatus, SourceType, SystemMetadataKey } from 'src/enum';
 import { DetectedFaces } from 'src/repositories/machine-learning.repository';
 import { FaceSearchResult } from 'src/repositories/search.repository';
 import { PersonService } from 'src/services/person.service';
@@ -9,7 +9,7 @@ import { ImmichFileResponse } from 'src/utils/file';
 import { assetStub } from 'test/fixtures/asset.stub';
 import { authStub } from 'test/fixtures/auth.stub';
 import { faceStub } from 'test/fixtures/face.stub';
-import { personStub, personThumbnailStub } from 'test/fixtures/person.stub';
+import { personStub } from 'test/fixtures/person.stub';
 import { systemConfigStub } from 'test/fixtures/system-config.stub';
 import { factory } from 'test/small.factory';
 import { makeStream, newTestService, ServiceMocks } from 'test/utils';
@@ -1024,114 +1024,6 @@ describe(PersonService.name, () => {
     });
   });
 
-  describe('handleGeneratePersonThumbnail', () => {
-    it('should skip if machine learning is disabled', async () => {
-      mocks.systemMetadata.get.mockResolvedValue(systemConfigStub.machineLearningDisabled);
-
-      await expect(sut.handleGeneratePersonThumbnail({ id: 'person-1' })).resolves.toBe(JobStatus.SKIPPED);
-      expect(mocks.asset.getByIds).not.toHaveBeenCalled();
-      expect(mocks.systemMetadata.get).toHaveBeenCalled();
-    });
-
-    it('should skip a person not found', async () => {
-      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
-      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
-    });
-
-    it('should skip a person without a face asset id', async () => {
-      mocks.person.getById.mockResolvedValue(personStub.noThumbnail);
-      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
-      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
-    });
-
-    it('should skip a person with face not found', async () => {
-      await sut.handleGeneratePersonThumbnail({ id: 'person-1' });
-      expect(mocks.media.generateThumbnail).not.toHaveBeenCalled();
-    });
-
-    it('should generate a thumbnail', async () => {
-      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailMiddle);
-      mocks.media.generateThumbnail.mockResolvedValue();
-
-      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });
-
-      expect(mocks.person.getDataForThumbnailGenerationJob).toHaveBeenCalledWith(personStub.primaryPerson.id);
-      expect(mocks.storage.mkdirSync).toHaveBeenCalledWith('upload/thumbs/admin_id/pe/rs');
-      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
-        assetStub.primaryImage.originalPath,
-        {
-          colorspace: Colorspace.P3,
-          format: ImageFormat.JPEG,
-          size: 250,
-          quality: 80,
-          crop: {
-            left: 238,
-            top: 163,
-            width: 274,
-            height: 274,
-          },
-          processInvalidImages: false,
-        },
-        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
-      );
-      expect(mocks.person.update).toHaveBeenCalledWith({
-        id: 'person-1',
-        thumbnailPath: 'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
-      });
-    });
-
-    it('should generate a thumbnail without going negative', async () => {
-      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailStart);
-      mocks.media.generateThumbnail.mockResolvedValue();
-
-      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });
-
-      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
-        assetStub.primaryImage.originalPath,
-        {
-          colorspace: Colorspace.P3,
-          format: ImageFormat.JPEG,
-          size: 250,
-          quality: 80,
-          crop: {
-            left: 0,
-            top: 85,
-            width: 510,
-            height: 510,
-          },
-          processInvalidImages: false,
-        },
-        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
-      );
-    });
-
-    it('should generate a thumbnail without overflowing', async () => {
-      mocks.person.getDataForThumbnailGenerationJob.mockResolvedValue(personThumbnailStub.newThumbnailEnd);
-      mocks.person.update.mockResolvedValue(personStub.primaryPerson);
-      mocks.media.generateThumbnail.mockResolvedValue();
-
-      await sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id });
-
-      expect(mocks.media.generateThumbnail).toHaveBeenCalledWith(
-        assetStub.primaryImage.originalPath,
-        {
-          colorspace: Colorspace.P3,
-          format: ImageFormat.JPEG,
-          size: 250,
-          quality: 80,
-          crop: {
-            left: 591,
-            top: 591,
-            width: 408,
-            height: 408,
-          },
-          processInvalidImages: false,
-        },
-        'upload/thumbs/admin_id/pe/rs/person-1.jpeg',
-      );
-    });
-  });
-
   describe('mergePerson', () => {
     it('should require person.write and person.merge permission', async () => {
       mocks.person.getById.mockResolvedValueOnce(personStub.primaryPerson);
diff --git a/server/src/services/person.service.ts b/server/src/services/person.service.ts
index 77a9c70300..e6161b8f9c 100644
--- a/server/src/services/person.service.ts
+++ b/server/src/services/person.service.ts
@@ -1,7 +1,6 @@
 import { BadRequestException, Injectable, NotFoundException } from '@nestjs/common';
 import { Insertable, Updateable } from 'kysely';
-import { FACE_THUMBNAIL_SIZE, JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
-import { StorageCore } from 'src/cores/storage.core';
+import { JOBS_ASSET_PAGINATION_SIZE } from 'src/constants';
 import { Person } from 'src/database';
 import { AssetFaces, FaceSearch } from 'src/db';
 import { Chunked, OnJob } from 'src/decorators';
@@ -25,10 +24,8 @@ import {
   PersonUpdateDto,
 } from 'src/dtos/person.dto';
 import {
-  AssetType,
   AssetVisibility,
   CacheControl,
-  ImageFormat,
   JobName,
   JobStatus,
   Permission,
@@ -40,10 +37,10 @@ import {
 import { BoundingBox } from 'src/repositories/machine-learning.repository';
 import { UpdateFacesData } from 'src/repositories/person.repository';
 import { BaseService } from 'src/services/base.service';
-import { CropOptions, ImageDimensions, InputDimensions, JobItem, JobOf } from 'src/types';
+import { JobItem, JobOf } from 'src/types';
 import { ImmichFileResponse } from 'src/utils/file';
 import { mimeTypes } from 'src/utils/mime-types';
-import { isFaceImportEnabled, isFacialRecognitionEnabled } from 'src/utils/misc';
+import { isFacialRecognitionEnabled } from 'src/utils/misc';
 
 @Injectable()
 export class PersonService extends BaseService {
@@ -537,41 +534,6 @@ export class PersonService extends BaseService {
     return JobStatus.SUCCESS;
   }
 
-  @OnJob({ name: JobName.GENERATE_PERSON_THUMBNAIL, queue: QueueName.THUMBNAIL_GENERATION })
-  async handleGeneratePersonThumbnail({ id }: JobOf<JobName.GENERATE_PERSON_THUMBNAIL>): Promise<JobStatus> {
-    const { machineLearning, metadata, image } = await this.getConfig({ withCache: true });
-    if (!isFacialRecognitionEnabled(machineLearning) && !isFaceImportEnabled(metadata)) {
-      return JobStatus.SKIPPED;
-    }
-
-    const data = await this.personRepository.getDataForThumbnailGenerationJob(id);
-    if (!data) {
-      this.logger.error(`Could not generate person thumbnail for ${id}: missing data`);
-      return JobStatus.FAILED;
-    }
-
-    const { ownerId, x1, y1, x2, y2, oldWidth, oldHeight } = data;
-
-    const { width, height, inputPath } = await this.getInputDimensions(data);
-
-    const thumbnailPath = StorageCore.getPersonThumbnailPath({ id, ownerId });
-    this.storageCore.ensureFolders(thumbnailPath);
-
-    const thumbnailOptions = {
-      colorspace: image.colorspace,
-      format: ImageFormat.JPEG,
-      size: FACE_THUMBNAIL_SIZE,
-      quality: image.thumbnail.quality,
-      crop: this.getCrop({ old: { width: oldWidth, height: oldHeight }, new: { width, height } }, { x1, y1, x2, y2 }),
-      processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
-    };
-
-    await this.mediaRepository.generateThumbnail(inputPath, thumbnailOptions, thumbnailPath);
-    await this.personRepository.update({ id, thumbnailPath });
-
-    return JobStatus.SUCCESS;
-  }
-
   async mergePerson(auth: AuthDto, id: string, dto: MergePersonDto): Promise<BulkIdResponseDto[]> {
     const mergeIds = dto.ids;
     if (mergeIds.includes(id)) {
@@ -642,57 +604,6 @@ export class PersonService extends BaseService {
     return person;
   }
 
-  private async getInputDimensions(asset: {
-    type: AssetType;
-    exifImageWidth: number;
-    exifImageHeight: number;
-    previewPath: string;
-    originalPath: string;
-    oldWidth: number;
-    oldHeight: number;
-  }): Promise<InputDimensions> {
-    if (asset.type === AssetType.IMAGE) {
-      let { exifImageWidth: width, exifImageHeight: height } = asset;
-      if (asset.oldHeight > asset.oldWidth !== height > width) {
-        [width, height] = [height, width];
-      }
-
-      return { width, height, inputPath: asset.originalPath };
-    }
-
-    const { width, height } = await this.mediaRepository.getImageDimensions(asset.previewPath);
-    return { width, height, inputPath: asset.previewPath };
-  }
-
-  private getCrop(dims: { old: ImageDimensions; new: ImageDimensions }, { x1, y1, x2, y2 }: BoundingBox): CropOptions {
-    const widthScale = dims.new.width / dims.old.width;
-    const heightScale = dims.new.height / dims.old.height;
-
-    const halfWidth = (widthScale * (x2 - x1)) / 2;
-    const halfHeight = (heightScale * (y2 - y1)) / 2;
-
-    const middleX = Math.round(widthScale * x1 + halfWidth);
-    const middleY = Math.round(heightScale * y1 + halfHeight);
-
-    // zoom out 10%
-    const targetHalfSize = Math.floor(Math.max(halfWidth, halfHeight) * 1.1);
-
-    // get the longest distance from the center of the image without overflowing
-    const newHalfSize = Math.min(
-      middleX - Math.max(0, middleX - targetHalfSize),
-      middleY - Math.max(0, middleY - targetHalfSize),
-      Math.min(dims.new.width - 1, middleX + targetHalfSize) - middleX,
-      Math.min(dims.new.height - 1, middleY + targetHalfSize) - middleY,
-    );
-
-    return {
-      left: middleX - newHalfSize,
-      top: middleY - newHalfSize,
-      width: newHalfSize * 2,
-      height: newHalfSize * 2,
-    };
-  }
-
   // TODO return a asset face response
   async createFace(auth: AuthDto, dto: AssetFaceCreateDto): Promise<void> {
     await Promise.all([
diff --git a/server/src/utils/misc.ts b/server/src/utils/misc.ts
index ff1656da74..05811350e4 100644
--- a/server/src/utils/misc.ts
+++ b/server/src/utils/misc.ts
@@ -301,3 +301,7 @@ export const globToSqlPattern = (glob: string) => {
   const tokens = picomatch.parse(glob).tokens;
   return tokens.map((token) => convertTokenToSqlPattern(token)).join('');
 };
+
+export function clamp(value: number, min: number, max: number) {
+  return Math.max(min, Math.min(max, value));
+}
diff --git a/server/test/fixtures/person.stub.ts b/server/test/fixtures/person.stub.ts
index 8457f9ddcd..21a184035a 100644
--- a/server/test/fixtures/person.stub.ts
+++ b/server/test/fixtures/person.stub.ts
@@ -178,8 +178,7 @@ export const personThumbnailStub = {
     oldWidth: 2160,
     type: AssetType.IMAGE,
     originalPath: '/original/path.jpg',
-    exifImageHeight: 3840,
-    exifImageWidth: 2160,
+    exifOrientation: '1',
     previewPath: previewFile.path,
   }),
   newThumbnailMiddle: Object.freeze({
@@ -192,8 +191,7 @@ export const personThumbnailStub = {
     oldWidth: 400,
     type: AssetType.IMAGE,
     originalPath: '/original/path.jpg',
-    exifImageHeight: 1000,
-    exifImageWidth: 1000,
+    exifOrientation: '1',
     previewPath: previewFile.path,
   }),
   newThumbnailEnd: Object.freeze({
@@ -206,8 +204,46 @@ export const personThumbnailStub = {
     oldWidth: 500,
     type: AssetType.IMAGE,
     originalPath: '/original/path.jpg',
-    exifImageHeight: 1000,
-    exifImageWidth: 1000,
+    exifOrientation: '1',
+    previewPath: previewFile.path,
+  }),
+  rawEmbeddedThumbnail: Object.freeze({
+    ownerId: userStub.admin.id,
+    x1: 100,
+    y1: 100,
+    x2: 200,
+    y2: 200,
+    oldHeight: 500,
+    oldWidth: 400,
+    type: AssetType.IMAGE,
+    originalPath: '/original/path.dng',
+    exifOrientation: '1',
+    previewPath: previewFile.path,
+  }),
+  negativeCoordinate: Object.freeze({
+    ownerId: userStub.admin.id,
+    x1: -176,
+    y1: -230,
+    x2: 193,
+    y2: 251,
+    oldHeight: 1440,
+    oldWidth: 2162,
+    type: AssetType.IMAGE,
+    originalPath: '/original/path.jpg',
+    exifOrientation: '1',
+    previewPath: previewFile.path,
+  }),
+  overflowingCoordinate: Object.freeze({
+    ownerId: userStub.admin.id,
+    x1: 2097,
+    y1: 0,
+    x2: 2171,
+    y2: 152,
+    oldHeight: 1440,
+    oldWidth: 2162,
+    type: AssetType.IMAGE,
+    originalPath: '/original/path.jpg',
+    exifOrientation: '1',
     previewPath: previewFile.path,
   }),
 };