Compare commits


1 Commit

Author           SHA1        Message                         Date
Daniel Dietzler  ce0123df30  fix: tag update race condition  2026-01-19 21:32:38 +01:00
12 changed files with 60 additions and 36 deletions

View File

@@ -457,6 +457,7 @@ export const columns = {
     'asset_exif.projectionType',
     'asset_exif.rating',
     'asset_exif.state',
+    'asset_exif.tags',
     'asset_exif.timeZone',
   ],
   plugin: [
@@ -480,4 +481,5 @@ export const lockableProperties = [
   'longitude',
   'rating',
   'timeZone',
+  'tags',
 ] as const;

View File

@@ -37,20 +37,6 @@ select
       and "asset_file"."type" = $1
     ) as agg
   ) as "files",
-  (
-    select
-      coalesce(json_agg(agg), '[]')
-    from
-      (
-        select
-          "tag"."value"
-        from
-          "tag"
-          inner join "tag_asset" on "tag"."id" = "tag_asset"."tagId"
-        where
-          "asset"."id" = "tag_asset"."assetId"
-      ) as agg
-  ) as "tags",
   to_json("asset_exif") as "exifInfo"
 from
   "asset"

View File

@@ -43,6 +43,7 @@ select
   "asset_exif"."projectionType",
   "asset_exif"."rating",
   "asset_exif"."state",
+  "asset_exif"."tags",
   "asset_exif"."timeZone"
 from
   "asset_exif"
@@ -127,6 +128,7 @@ select
   "asset_exif"."projectionType",
   "asset_exif"."rating",
   "asset_exif"."state",
+  "asset_exif"."tags",
   "asset_exif"."timeZone"
 from
   "asset_exif"

View File

@@ -1,6 +1,5 @@
 import { Injectable } from '@nestjs/common';
 import { Kysely } from 'kysely';
-import { jsonArrayFrom } from 'kysely/helpers/postgres';
 import { InjectKysely } from 'nestjs-kysely';
 import { Asset, columns } from 'src/database';
 import { DummyValue, GenerateSql } from 'src/decorators';
@@ -42,15 +41,6 @@ export class AssetJobRepository {
       .where('asset.id', '=', asUuid(id))
       .select(['id', 'originalPath'])
       .select((eb) => withFiles(eb, AssetFileType.Sidecar))
-      .select((eb) =>
-        jsonArrayFrom(
-          eb
-            .selectFrom('tag')
-            .select(['tag.value'])
-            .innerJoin('tag_asset', 'tag.id', 'tag_asset.tagId')
-            .whereRef('asset.id', '=', 'tag_asset.assetId'),
-        ).as('tags'),
-      )
       .$call(withExifInner)
       .limit(1)
       .executeTakeFirst();

View File

@@ -178,6 +178,7 @@ export class AssetRepository {
           bitsPerSample: ref('bitsPerSample'),
           rating: ref('rating'),
           fps: ref('fps'),
+          tags: ref('tags'),
           lockedProperties:
             lockedPropertiesBehavior === 'append'
               ? distinctLocked(eb, exif.lockedProperties ?? null)

View File

@@ -0,0 +1,9 @@
+import { Kysely, sql } from 'kysely';
+
+export async function up(db: Kysely<any>): Promise<void> {
+  await sql`ALTER TABLE "asset_exif" ADD "tags" character varying[];`.execute(db);
+}
+
+export async function down(db: Kysely<any>): Promise<void> {
+  await sql`ALTER TABLE "asset_exif" DROP COLUMN "tags";`.execute(db);
+}

View File

@@ -93,6 +93,9 @@ export class AssetExifTable {
   @Column({ type: 'integer', nullable: true })
   rating!: number | null;

+  @Column({ type: 'character varying', array: true, nullable: true })
+  tags!: string[] | null;
+
   @UpdateDateColumn({ default: () => 'clock_timestamp()' })
   updatedAt!: Generated<Date>;

View File

@@ -387,6 +387,7 @@ describe(MetadataService.name, () => {
     it('should extract tags from TagsList', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent'] } } as any);
       mockReadTags({ TagsList: ['Parent'] });
       mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -397,6 +398,7 @@ describe(MetadataService.name, () => {
     it('should extract hierarchy from TagsList', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child'] } } as any);
       mockReadTags({ TagsList: ['Parent/Child'] });
       mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
       mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -417,6 +419,7 @@ describe(MetadataService.name, () => {
     it('should extract tags from Keywords as a string', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent'] } } as any);
       mockReadTags({ Keywords: 'Parent' });
       mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -427,6 +430,7 @@ describe(MetadataService.name, () => {
     it('should extract tags from Keywords as a list', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent'] } } as any);
       mockReadTags({ Keywords: ['Parent'] });
       mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -437,6 +441,7 @@ describe(MetadataService.name, () => {
     it('should extract tags from Keywords as a list with a number', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent', '2024'] } } as any);
       mockReadTags({ Keywords: ['Parent', 2024] });
       mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -448,6 +453,7 @@ describe(MetadataService.name, () => {
     it('should extract hierarchal tags from Keywords', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child'] } } as any);
       mockReadTags({ Keywords: 'Parent/Child' });
       mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -467,6 +473,7 @@ describe(MetadataService.name, () => {
     it('should ignore Keywords when TagsList is present', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child', 'Child'] } } as any);
       mockReadTags({ Keywords: 'Child', TagsList: ['Parent/Child'] });
       mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -486,6 +493,7 @@ describe(MetadataService.name, () => {
     it('should extract hierarchy from HierarchicalSubject', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child', 'TagA'] } } as any);
       mockReadTags({ HierarchicalSubject: ['Parent|Child', 'TagA'] });
       mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
       mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.childUpsert);
@@ -507,6 +515,7 @@ describe(MetadataService.name, () => {
     it('should extract tags from HierarchicalSubject as a list with a number', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(removeNonSidecarFiles(assetStub.image));
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent', '2024'] } } as any);
       mockReadTags({ HierarchicalSubject: ['Parent', 2024] });
       mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);
@@ -518,6 +527,7 @@ describe(MetadataService.name, () => {
     it('should extract ignore / characters in a HierarchicalSubject tag', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Mom|Dad'] } } as any);
       mockReadTags({ HierarchicalSubject: ['Mom/Dad'] });
       mocks.tag.upsertValue.mockResolvedValueOnce(tagStub.parentUpsert);
@@ -532,6 +542,7 @@ describe(MetadataService.name, () => {
     it('should ignore HierarchicalSubject when TagsList is present', async () => {
       mocks.assetJob.getForMetadataExtraction.mockResolvedValue(assetStub.image);
+      mocks.asset.getById.mockResolvedValue({ exifInfo: { tags: ['Parent/Child', 'Parent2/Child2'] } } as any);
       mockReadTags({ HierarchicalSubject: ['Parent2|Child2'], TagsList: ['Parent/Child'] });
       mocks.tag.upsertValue.mockResolvedValue(tagStub.parentUpsert);

View File

@@ -254,6 +254,8 @@ export class MetadataService extends BaseService {
       }
     }
+    const tags = this.getTagList(exifTags);
     const exifData: Insertable<AssetExifTable> = {
       assetId: asset.id,
@@ -296,6 +298,8 @@ export class MetadataService extends BaseService {
       // grouping
       livePhotoCID: (exifTags.ContentIdentifier || exifTags.MediaGroupUUID) ?? null,
       autoStackId: this.getAutoStackId(exifTags),
+      tags: tags.length > 0 ? tags : undefined,
     };
     const isSidewards = exifTags.Orientation && this.isOrientationSidewards(exifTags.Orientation);
@@ -316,9 +320,10 @@ export class MetadataService extends BaseService {
         width: asset.width == null ? assetWidth : undefined,
         height: asset.height == null ? assetHeight : undefined,
       }),
-      this.applyTagList(asset, exifTags),
     ];
+    await this.applyTagList(asset);
     if (this.isMotionPhoto(asset, exifTags)) {
       promises.push(this.applyMotionPhotos(asset, exifTags, dates, stats));
     }
@@ -405,35 +410,35 @@ export class MetadataService extends BaseService {
   @OnEvent({ name: 'AssetTag' })
   async handleTagAsset({ assetId }: ArgOf<'AssetTag'>) {
-    await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId, tags: true } });
+    await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId } });
   }
   @OnEvent({ name: 'AssetUntag' })
   async handleUntagAsset({ assetId }: ArgOf<'AssetUntag'>) {
-    await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId, tags: true } });
+    await this.jobRepository.queue({ name: JobName.SidecarWrite, data: { id: assetId } });
   }
   @OnJob({ name: JobName.SidecarWrite, queue: QueueName.Sidecar })
   async handleSidecarWrite(job: JobOf<JobName.SidecarWrite>): Promise<JobStatus> {
-    const { id, tags } = job;
+    const { id } = job;
     const asset = await this.assetJobRepository.getForSidecarWriteJob(id);
     if (!asset) {
       return JobStatus.Failed;
     }
     const lockedProperties = await this.assetJobRepository.getLockedPropertiesForMetadataExtraction(id);
-    const tagsList = (asset.tags || []).map((tag) => tag.value);
     const { sidecarFile } = getAssetFiles(asset.files);
     const sidecarPath = sidecarFile?.path || `${asset.originalPath}.xmp`;
-    const { description, dateTimeOriginal, latitude, longitude, rating } = _.pick(
+    const { description, dateTimeOriginal, latitude, longitude, rating, tags } = _.pick(
       {
         description: asset.exifInfo.description,
        dateTimeOriginal: asset.exifInfo.dateTimeOriginal,
        latitude: asset.exifInfo.latitude,
        longitude: asset.exifInfo.longitude,
        rating: asset.exifInfo.rating,
+        tags: asset.exifInfo.tags,
       },
       lockedProperties,
     );
@@ -446,7 +451,7 @@ export class MetadataService extends BaseService {
       GPSLatitude: latitude,
       GPSLongitude: longitude,
       Rating: rating,
-      TagsList: tags ? tagsList : undefined,
+      TagsList: tags?.length ? tags : undefined,
     },
     _.isUndefined,
   );
@@ -560,11 +565,14 @@ export class MetadataService extends BaseService {
     return tags;
   }
-  private async applyTagList(asset: { id: string; ownerId: string }, exifTags: ImmichTags) {
-    const tags = this.getTagList(exifTags);
-    const results = await upsertTags(this.tagRepository, { userId: asset.ownerId, tags });
+  private async applyTagList({ id, ownerId }: { id: string; ownerId: string }) {
+    const asset = await this.assetRepository.getById(id, { exifInfo: true });
+    const results = await upsertTags(this.tagRepository, {
+      userId: ownerId,
+      tags: asset?.exifInfo?.tags ?? [],
+    });
     await this.tagRepository.replaceAssetTags(
-      asset.id,
+      id,
       results.map((tag) => tag.id),
     );
   }

View File

@@ -90,6 +90,7 @@ export class TagService extends BaseService {
     const results = await this.tagRepository.upsertAssetIds(items);
     for (const assetId of new Set(results.map((item) => item.assetId))) {
+      await this.updateTags(assetId);
       await this.eventRepository.emit('AssetTag', { assetId });
     }
@@ -107,6 +108,7 @@ export class TagService extends BaseService {
     for (const { id: assetId, success } of results) {
       if (success) {
+        await this.updateTags(assetId);
         await this.eventRepository.emit('AssetTag', { assetId });
       }
     }
@@ -125,6 +127,7 @@ export class TagService extends BaseService {
     for (const { id: assetId, success } of results) {
       if (success) {
+        await this.updateTags(assetId);
         await this.eventRepository.emit('AssetUntag', { assetId });
       }
     }
@@ -145,4 +148,12 @@ export class TagService extends BaseService {
     }
     return tag;
   }
+
+  private async updateTags(assetId: string) {
+    const asset = await this.assetRepository.getById(assetId, { exifInfo: true });
+    await this.assetRepository.upsertExif(
+      { assetId, tags: asset?.exifInfo?.tags ?? [] },
+      { lockedPropertiesBehavior: 'append' },
+    );
+  }
 }

View File

@@ -317,7 +317,7 @@ export type JobItem =
   // Sidecar Scanning
   | { name: JobName.SidecarQueueAll; data: IBaseJob }
   | { name: JobName.SidecarCheck; data: IEntityJob }
-  | { name: JobName.SidecarWrite; data: ISidecarWriteJob }
+  | { name: JobName.SidecarWrite; data: IEntityJob }
   // Facial Recognition
   | { name: JobName.AssetDetectFacesQueueAll; data: IBaseJob }

View File

@@ -147,6 +147,7 @@ export const sharedLinkStub = {
       visibility: AssetVisibility.Timeline,
       width: 500,
       height: 500,
+      tags: [],
     },
     sharedLinks: [],
     faces: [],