refactor: use isTransparent naming and separate getImageMetadata

Author: midzelis
Date: 2026-02-21 01:56:15 +00:00
parent aaac6a4602
commit d3f2e98206
7 changed files with 85 additions and 74 deletions
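This refactor removes the alpha probe from decodeImage: transparency is now read up front through a new getImageMetadata repository method, mimeTypes.canHaveAlpha becomes mimeTypes.canBeTransparent, and thumbnail format selection keys off an isTransparent flag instead of hasAlpha. A minimal sketch of the reshaped repository surface, assembled from the hunks below (the real methods live on the media repository class and take the decode options shown there):

import sharp from 'sharp';

type ImageDimensions = { width: number; height: number };

// decodeImage now only decodes pixels; the real method builds its pipeline from DecodeToBufferOptions first.
async function decodeImage(input: string | Buffer) {
  return sharp(input).raw().toBuffer({ resolveWithObject: true });
}

// Dimensions and transparency come from a separate metadata read.
async function getImageMetadata(input: string | Buffer): Promise<ImageDimensions & { isTransparent: boolean }> {
  const { width = 0, height = 0, hasAlpha = false } = await sharp(input).metadata();
  return { width, height, isTransparent: hasAlpha };
}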

View File

@@ -142,13 +142,7 @@ export class MediaRepository {
async decodeImage(input: string | Buffer, options: DecodeToBufferOptions) {
const pipeline = await this.getImageDecodingPipeline(input, options);
- let hasAlpha = false;
- if (options.checkAlpha) {
- const metadata = await pipeline.metadata();
- hasAlpha = metadata.hasAlpha ?? false;
- }
- const { data, info } = await pipeline.raw().toBuffer({ resolveWithObject: true });
- return { data, info, hasAlpha };
+ return pipeline.raw().toBuffer({ resolveWithObject: true });
}
private async applyEdits(pipeline: sharp.Sharp, edits: AssetEditActionItem[]): Promise<sharp.Sharp> {
@@ -315,9 +309,9 @@ export class MediaRepository {
});
}
- async getImageDimensions(input: string | Buffer): Promise<ImageDimensions> {
- const { width = 0, height = 0 } = await sharp(input).metadata();
- return { width, height };
+ async getImageMetadata(input: string | Buffer): Promise<ImageDimensions & { isTransparent: boolean }> {
+ const { width = 0, height = 0, hasAlpha = false } = await sharp(input).metadata();
+ return { width, height, isTransparent: hasAlpha };
}
private configureFfmpegCall(input: string, output: string | Writable, options: TranscodeCommand) {
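With the alpha check gone from decodeImage, a caller that still needs transparency asks for it in a second call. A hedged usage sketch; mediaRepository and originalPath stand in for the service-side wiring shown in the later hunks:

// Decode pixels for resizing, then read dimensions and transparency separately.
const { data, info } = await mediaRepository.decodeImage(originalPath, { size: 1440 });
const { width, height, isTransparent } = await mediaRepository.getImageMetadata(originalPath);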

View File

@@ -344,10 +344,11 @@ describe(MediaService.name, () => {
mocks.media.decodeImage.mockImplementation((input) =>
Promise.resolve(
typeof input === 'string'
- ? { data: rawBuffer, info: rawInfo as OutputInfo, hasAlpha: false } // string implies original file
- : { data: fullsizeBuffer, info: rawInfo as OutputInfo, hasAlpha: false }, // buffer implies embedded image extracted
+ ? { data: rawBuffer, info: rawInfo as OutputInfo } // string implies original file
+ : { data: fullsizeBuffer, info: rawInfo as OutputInfo }, // buffer implies embedded image extracted
),
);
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 100, height: 100, isTransparent: false });
});
it('should skip thumbnail generation if asset not found', async () => {
@@ -417,7 +418,6 @@ describe(MediaService.name, () => {
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: false,
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440,
@@ -656,7 +656,6 @@ describe(MediaService.name, () => {
expect(mocks.storage.mkdirSync).toHaveBeenCalledWith(expect.any(String));
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: false,
colorspace: Colorspace.Srgb,
processInvalidImages: false,
size: 1440,
@@ -707,7 +706,6 @@ describe(MediaService.name, () => {
expect(mocks.storage.mkdirSync).toHaveBeenCalledWith(expect.any(String));
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: false,
colorspace: Colorspace.Srgb,
processInvalidImages: false,
size: 1440,
@@ -860,7 +858,7 @@ describe(MediaService.name, () => {
.exif({ fileSizeInByte: 5000, profileDescription: 'Adobe RGB', bitsPerSample: 14, orientation: undefined })
.build();
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
@@ -868,26 +866,51 @@ describe(MediaService.name, () => {
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
- checkAlpha: false,
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440,
});
});
+ it('should not check transparency metadata for raw files without extracted images', async () => {
+ const asset = AssetFactory.from({ originalFileName: 'file.dng' })
+ .exif({ fileSizeInByte: 5000, profileDescription: 'Adobe RGB', bitsPerSample: 14, orientation: undefined })
+ .build();
+ mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: false } });
+ mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
+ await sut.handleGenerateThumbnails({ id: asset.id });
+ expect(mocks.media.getImageMetadata).not.toHaveBeenCalled();
+ });
+ it('should not check transparency metadata for raw files with extracted images', async () => {
+ const asset = AssetFactory.from({ originalFileName: 'file.dng' })
+ .exif({ fileSizeInByte: 5000, profileDescription: 'Adobe RGB', bitsPerSample: 14, orientation: undefined })
+ .build();
+ mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
+ mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
+ mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
+ await sut.handleGenerateThumbnails({ id: asset.id });
+ expect(mocks.media.getImageMetadata).toHaveBeenCalledOnce();
+ expect(mocks.media.getImageMetadata).toHaveBeenCalledWith(extractedBuffer);
+ });
it('should resize original image if embedded image is too small', async () => {
const asset = AssetFactory.from({ originalFileName: 'file.dng' })
.exif({ fileSizeInByte: 5000, profileDescription: 'Adobe RGB', bitsPerSample: 14, orientation: undefined })
.build();
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 1000, height: 1000 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 1000, height: 1000, isTransparent: false });
mocks.systemMetadata.get.mockResolvedValue({ image: { extractEmbedded: true } });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: false,
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440,
@@ -905,7 +928,6 @@ describe(MediaService.name, () => {
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: false,
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440,
@@ -924,7 +946,6 @@ describe(MediaService.name, () => {
expect(mocks.media.extract).not.toHaveBeenCalled();
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: false,
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440,
@@ -977,14 +998,13 @@ describe(MediaService.name, () => {
image: { fullsize: { enabled: true, format: ImageFormat.Webp }, extractEmbedded: true },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
- checkAlpha: false,
colorspace: Colorspace.P3,
processInvalidImages: false,
size: 1440, // capped to preview size as fullsize conversion is skipped
@@ -1016,14 +1036,13 @@ describe(MediaService.name, () => {
image: { fullsize: { enabled: true, format: ImageFormat.Webp }, extractEmbedded: true },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jxl });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(extractedBuffer, {
- checkAlpha: false,
colorspace: Colorspace.P3,
processInvalidImages: false,
});
@@ -1065,14 +1084,13 @@ describe(MediaService.name, () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true }, extractEmbedded: false } });
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: false,
colorspace: Colorspace.P3,
processInvalidImages: false,
});
@@ -1110,7 +1128,7 @@ describe(MediaService.name, () => {
it('should generate full-size preview from non-web-friendly images', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
// HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari.
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
@@ -1125,7 +1143,6 @@ describe(MediaService.name, () => {
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: true,
colorspace: Colorspace.P3,
processInvalidImages: false,
});
@@ -1150,14 +1167,13 @@ describe(MediaService.name, () => {
const asset = AssetFactory.from().exif().build();
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: true } } });
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: asset.id });
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: false,
colorspace: Colorspace.Srgb,
processInvalidImages: false,
size: 1440,
@@ -1174,7 +1190,7 @@ describe(MediaService.name, () => {
it('should always generate full-size preview from non-web-friendly panoramas', async () => {
mocks.systemMetadata.get.mockResolvedValue({ image: { fullsize: { enabled: false } } });
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
mocks.media.copyTagGroup.mockResolvedValue(true);
const asset = AssetFactory.from({ originalFileName: 'panorama.tif' })
@@ -1190,7 +1206,6 @@ describe(MediaService.name, () => {
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: true,
colorspace: Colorspace.Srgb,
orientation: undefined,
processInvalidImages: false,
@@ -1221,7 +1236,7 @@ describe(MediaService.name, () => {
image: { fullsize: { enabled: true, format: ImageFormat.Webp, quality: 90 } },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
// HEIF/HIF image taken by cameras are not web-friendly, only has limited support on Safari.
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
@@ -1236,7 +1251,6 @@ describe(MediaService.name, () => {
expect(mocks.media.decodeImage).toHaveBeenCalledOnce();
expect(mocks.media.decodeImage).toHaveBeenCalledWith(asset.originalPath, {
- checkAlpha: true,
colorspace: Colorspace.P3,
processInvalidImages: false,
});
@@ -1262,7 +1276,7 @@ describe(MediaService.name, () => {
image: { fullsize: { enabled: true, format: ImageFormat.Jpeg, progressive: true } },
});
mocks.media.extract.mockResolvedValue({ buffer: extractedBuffer, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue({ width: 3840, height: 2160 });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: false });
const asset = AssetFactory.from({ originalFileName: 'image.hif' })
.exif({
fileSizeInByte: 5000,
@@ -1296,10 +1310,11 @@ describe(MediaService.name, () => {
mocks.media.decodeImage.mockImplementation((input) =>
Promise.resolve(
typeof input === 'string'
- ? { data: rawBuffer, info: rawInfo as OutputInfo, hasAlpha: false } // string implies original file
- : { data: fullsizeBuffer, info: rawInfo as OutputInfo, hasAlpha: false }, // buffer implies embedded image extracted
+ ? { data: rawBuffer, info: rawInfo as OutputInfo } // string implies original file
+ : { data: fullsizeBuffer, info: rawInfo as OutputInfo }, // buffer implies embedded image extracted
),
);
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 100, height: 100, isTransparent: false });
});
it('should skip videos', async () => {
@@ -1467,7 +1482,7 @@ describe(MediaService.name, () => {
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 1000, height: 1000 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1512,7 +1527,7 @@ describe(MediaService.name, () => {
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 1000, height: 1000 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1557,7 +1572,7 @@ describe(MediaService.name, () => {
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 2160, height: 3840 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1600,7 +1615,7 @@ describe(MediaService.name, () => {
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 1000, height: 1000 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1643,7 +1658,7 @@ describe(MediaService.name, () => {
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 4624, height: 3080 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1686,7 +1701,7 @@ describe(MediaService.name, () => {
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 4624, height: 3080 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1732,8 +1747,8 @@ describe(MediaService.name, () => {
const data = Buffer.from('');
const info = { width: 2160, height: 3840 } as OutputInfo;
mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.Jpeg });
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
- mocks.media.getImageDimensions.mockResolvedValue(info);
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 2160, height: 3840, isTransparent: false });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1776,7 +1791,7 @@ describe(MediaService.name, () => {
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 2160, height: 3840 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1792,7 +1807,7 @@ describe(MediaService.name, () => {
mocks.media.generateThumbnail.mockResolvedValue();
const data = Buffer.from('');
const info = { width: 2160, height: 3840 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
@@ -1814,9 +1829,9 @@ describe(MediaService.name, () => {
const extracted = Buffer.from('');
const data = Buffer.from('');
const info = { width: 1000, height: 1000 } as OutputInfo;
- mocks.media.decodeImage.mockResolvedValue({ data, info, hasAlpha: false });
+ mocks.media.decodeImage.mockResolvedValue({ data, info });
mocks.media.extract.mockResolvedValue({ buffer: extracted, format: RawExtractedFormat.Jpeg });
- mocks.media.getImageDimensions.mockResolvedValue(info);
+ mocks.media.getImageMetadata.mockResolvedValue({ width: 1000, height: 1000, isTransparent: false });
await expect(sut.handleGeneratePersonThumbnail({ id: personStub.primaryPerson.id })).resolves.toBe(
JobStatus.Success,
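Specs that need a transparent source can steer the new method directly. A minimal sketch, assuming the vitest mocks and sut wired up in this suite; the expected WebP fallback is defined in the service hunk below:

// Hypothetical spec fragment: force a transparent source and regenerate thumbnails.
mocks.media.getImageMetadata.mockResolvedValue({ width: 3840, height: 2160, isTransparent: true });
mocks.assetJob.getForGenerateThumbnailJob.mockResolvedValue(asset);
await sut.handleGenerateThumbnails({ id: asset.id });
// With a JPEG preview format configured, the generated preview should come out as WebP instead.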

View File

@@ -257,7 +257,7 @@ export class MediaService extends BaseService {
return extracted;
}
- private async decodeImage(thumbSource: string | Buffer, exifInfo: Exif, targetSize?: number, checkAlpha?: boolean) {
+ private async decodeImage(thumbSource: string | Buffer, exifInfo: Exif, targetSize?: number) {
const { image } = await this.getConfig({ withCache: true });
const colorspace = this.isSRGB(exifInfo) ? Colorspace.Srgb : image.colorspace;
const decodeOptions: DecodeToBufferOptions = {
@@ -265,11 +265,10 @@ export class MediaService extends BaseService {
processInvalidImages: process.env.IMMICH_PROCESS_INVALID_IMAGES === 'true',
size: targetSize,
orientation: exifInfo.orientation ? Number(exifInfo.orientation) : undefined,
- checkAlpha,
};
- const { info, data, hasAlpha } = await this.mediaRepository.decodeImage(thumbSource, decodeOptions);
- return { info, data, colorspace, hasAlpha };
+ const { info, data } = await this.mediaRepository.decodeImage(thumbSource, decodeOptions);
+ return { info, data, colorspace };
}
private async extractOriginalImage(asset: ThumbnailAsset, image: SystemConfig['image'], useEdits = false) {
@@ -281,16 +280,20 @@ export class MediaService extends BaseService {
useEdits;
const convertFullsize = generateFullsize && (!extracted || !mimeTypes.isWebSupportedImage(`.${extracted.format}`));
- const checkAlpha = !extracted && mimeTypes.canHaveAlpha(asset.originalPath);
- const { data, info, colorspace, hasAlpha } = await this.decodeImage(
- extracted ? extracted.buffer : asset.originalPath,
+ const thumbSource = extracted ? extracted.buffer : asset.originalPath;
+ const { data, info, colorspace } = await this.decodeImage(
+ thumbSource,
// only specify orientation to extracted images which don't have EXIF orientation data
// or it can double rotate the image
extracted ? asset.exifInfo : { ...asset.exifInfo, orientation: null },
convertFullsize ? undefined : image.preview.size,
- checkAlpha,
);
+ let isTransparent = false;
+ if (!extracted && mimeTypes.canBeTransparent(asset.originalPath)) {
+ ({ isTransparent } = await this.mediaRepository.getImageMetadata(asset.originalPath));
+ }
return {
extracted,
data,
@@ -298,17 +301,17 @@ export class MediaService extends BaseService {
colorspace,
convertFullsize,
generateFullsize,
- hasAlpha,
+ isTransparent,
};
}
private async generateImageThumbnails(asset: ThumbnailAsset, { image }: SystemConfig, useEdits: boolean = false) {
// Handle embedded preview extraction for RAW files
const extractedImage = await this.extractOriginalImage(asset, image, useEdits);
- const { info, data, colorspace, generateFullsize, convertFullsize, extracted, hasAlpha } = extractedImage;
+ const { info, data, colorspace, generateFullsize, convertFullsize, extracted, isTransparent } = extractedImage;
- const previewFormat = this.resolveFinalImageFormat(hasAlpha, image.preview.format, asset.id);
- const thumbnailFormat = this.resolveFinalImageFormat(hasAlpha, image.thumbnail.format, asset.id);
+ const previewFormat = this.resolveFinalImageFormat(isTransparent, image.preview.format, asset.id);
+ const thumbnailFormat = this.resolveFinalImageFormat(isTransparent, image.thumbnail.format, asset.id);
const previewFile = this.getImageFile(asset, {
fileType: AssetFileType.Preview,
@@ -336,7 +339,7 @@ export class MediaService extends BaseService {
let fullsizeFile: UpsertFileOptions | undefined;
if (convertFullsize) {
- const fullsizeFormat = this.resolveFinalImageFormat(hasAlpha, image.fullsize.format, asset.id);
+ const fullsizeFormat = this.resolveFinalImageFormat(isTransparent, image.fullsize.format, asset.id);
// convert a new fullsize image from the same source as the thumbnail
fullsizeFile = this.getImageFile(asset, {
fileType: AssetFileType.FullSize,
@@ -768,7 +771,7 @@ export class MediaService extends BaseService {
}
private async shouldUseExtractedImage(extractedPathOrBuffer: string | Buffer, targetSize: number) {
- const { width, height } = await this.mediaRepository.getImageDimensions(extractedPathOrBuffer);
+ const { width, height } = await this.mediaRepository.getImageMetadata(extractedPathOrBuffer);
const extractedSize = Math.min(width, height);
return extractedSize >= targetSize;
}
@@ -867,8 +870,8 @@ export class MediaService extends BaseService {
return generated;
}
- private resolveFinalImageFormat(sourceHasAlpha: boolean, format: ImageFormat, assetId: string): ImageFormat {
- if (sourceHasAlpha && format === ImageFormat.Jpeg) {
+ private resolveFinalImageFormat(isTransparent: boolean, format: ImageFormat, assetId: string): ImageFormat {
+ if (isTransparent && format === ImageFormat.Jpeg) {
this.logger.debug(
`Overriding output format from ${format} to ${ImageFormat.Webp} to preserve alpha channel for asset ${assetId}`,
);

View File

@@ -62,7 +62,6 @@ type DecodeImageOptions = {
export interface DecodeToBufferOptions extends DecodeImageOptions {
size?: number;
orientation?: ExifOrientation;
- checkAlpha?: boolean;
}
export type GenerateThumbnailOptions = Pick<ImageOptions, 'format' | 'quality' | 'progressive'> & DecodeToBufferOptions;
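With checkAlpha removed, decode options only describe how to decode. A sketch of a call under the slimmed interface, using option names that appear in the hunks above (the exact orientation type is not shown here):

// Hypothetical call site; checkAlpha is no longer part of DecodeToBufferOptions.
const options: DecodeToBufferOptions = {
  colorspace: Colorspace.P3,
  processInvalidImages: false,
  size: 1440,
  orientation: 6, // numeric EXIF orientation, when known
};
const { data, info } = await mediaRepository.decodeImage(originalPath, options);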

View File

@@ -153,7 +153,7 @@ describe('mimeTypes', () => {
}
});
- describe('canHaveAlpha', () => {
+ describe('canBeTransparent', () => {
for (const img of [
'a.avif',
'a.bmp',
@@ -169,13 +169,13 @@ describe('mimeTypes', () => {
'a.webp',
]) {
it(`should return true for ${img}`, () => {
- expect(mimeTypes.canHaveAlpha(img)).toBe(true);
+ expect(mimeTypes.canBeTransparent(img)).toBe(true);
});
}
for (const img of ['a.jpg', 'a.jpeg', 'a.jpe', 'a.insp', 'a.jp2', 'a.cr3', 'a.dng', 'a.nef', 'a.arw']) {
it(`should return false for ${img}`, () => {
- expect(mimeTypes.canHaveAlpha(img)).toBe(false);
+ expect(mimeTypes.canBeTransparent(img)).toBe(false);
});
}
});

View File

@@ -77,7 +77,7 @@ const extensionOverrides: Record<string, string> = {
'image/jpeg': '.jpg',
};
- const alphaCapableExtensions = new Set([
+ const transparentCapableExtensions = new Set([
'.avif',
'.bmp',
'.gif',
@@ -149,7 +149,7 @@ export const mimeTypes = {
isProfile: (filename: string) => isType(filename, profile),
isSidecar: (filename: string) => isType(filename, sidecar),
isVideo: (filename: string) => isType(filename, video),
- canHaveAlpha: (filename: string) => alphaCapableExtensions.has(extname(filename).toLowerCase()),
+ canBeTransparent: (filename: string) => transparentCapableExtensions.has(extname(filename).toLowerCase()),
isRaw: (filename: string) => isType(filename, raw),
lookup,
/** return an extension (including a leading `.`) for a mime-type */
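The rename is behavior-preserving: it is still a case-insensitive extension lookup. A trimmed-down sketch using only extensions that appear in the spec above (the real set is longer):

import { extname } from 'node:path';

// Hypothetical trimmed set; the full list lives in this module.
const transparentCapableExtensions = new Set(['.avif', '.bmp', '.gif', '.webp']);
const canBeTransparent = (filename: string) => transparentCapableExtensions.has(extname(filename).toLowerCase());

// canBeTransparent('photo.AVIF') -> true (the extension is lowercased first)
// canBeTransparent('photo.jpg')  -> false
// canBeTransparent('raw.dng')    -> false (RAW containers are treated as opaque here)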

View File

@@ -8,10 +8,10 @@ export const newMediaRepositoryMock = (): Mocked<RepositoryInterface<MediaReposi
writeExif: vitest.fn().mockImplementation(() => Promise.resolve()),
copyTagGroup: vitest.fn().mockImplementation(() => Promise.resolve()),
generateThumbhash: vitest.fn().mockResolvedValue(Buffer.from('')),
- decodeImage: vitest.fn().mockResolvedValue({ data: Buffer.from(''), info: {}, hasAlpha: false }),
+ decodeImage: vitest.fn().mockResolvedValue({ data: Buffer.from(''), info: {} }),
extract: vitest.fn().mockResolvedValue(null),
probe: vitest.fn(),
transcode: vitest.fn(),
- getImageDimensions: vitest.fn(),
+ getImageMetadata: vitest.fn(),
};
};