* feat: add OCR functionality and related configurations

* chore: update labeler configuration for machine learning files

* feat(i18n): enhance OCR model descriptions and add orientation classification and unwarping features

* chore: update Dockerfile to include ccache for improved build performance

* feat(ocr): enhance OCR model configuration with orientation classification and unwarping options, update PaddleOCR integration, and improve response structure
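
A minimal sketch of what those two preprocessing toggles could look like in the model configuration (the property names here are assumptions patterned after PaddleOCR's document-preprocessing options, not the actual DTO):

```typescript
// Hypothetical shape for illustration only; the real config keys may differ.
interface OcrPreprocessingOptionsSketch {
  // Detect and correct pages that were photographed rotated (0/90/180/270 degrees).
  enableOrientationClassification: boolean;
  // Flatten curved or warped document photos before detection/recognition.
  enableUnwarping: boolean;
}
```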

* refactor(ocr): remove OCR_CLEANUP job from enum and type definitions

* refactor(ocr): remove obsolete OCR entity and migration files, and update asset job status and schema to accommodate new OCR table structure

* refactor(ocr): update OCR schema and response structure to use individual coordinates instead of bounding box, and adjust related service and repository files
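
As a reading aid, a sketch of the per-result shape this implies (illustrative only; the authoritative definition is the server DTO and table, and the field names below mirror the values asserted in the medium tests later in this commit):

```typescript
// Sketch: one OCR result with the four corner points stored as individual
// coordinates (x1..y4) rather than a single bounding box.
interface OcrResultSketch {
  id: string;
  assetId: string;
  text: string;
  boxScore: number; // detection confidence for the text box
  textScore: number; // recognition confidence for the text itself
  x1: number; y1: number; // first corner
  x2: number; y2: number; // second corner
  x3: number; y3: number; // third corner
  x4: number; y4: number; // fourth corner
}
```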

* feat: enhance OCR configuration and functionality

- Updated OCR settings to include minimum detection box score, minimum detection score, and minimum recognition score (see the sketch after this list).
- Refactored PaddleOCRecognizer to utilize new scoring parameters.
- Introduced new database tables for asset OCR data and search functionality.
- Modified related services and repositories to support the new OCR features.
- Updated translations for improved clarity in settings UI.
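
A minimal sketch of how the new scoring settings might be shaped (property names and defaults are illustrative assumptions, not the actual system-config keys; a later commit in this series sets the minimum thresholds to 0.1):

```typescript
// Hypothetical OCR config section for illustration only.
interface OcrConfigSketch {
  enabled: boolean;
  minDetectionBoxScore: number; // discard detected text boxes below this confidence
  minDetectionScore: number; // pixel-level detection threshold
  minRecognitionScore: number; // discard recognized text below this confidence
}

const exampleDefaults: OcrConfigSketch = {
  enabled: true,
  minDetectionBoxScore: 0.1,
  minDetectionScore: 0.1,
  minRecognitionScore: 0.1,
};
```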

* sql changes

* use rapidocr

* change dto

* update web

* update lock

* update api

* store positions as normalized floats
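
For context, normalizing the stored positions typically just divides pixel coordinates by the image dimensions so they stay valid at any render size; a hedged sketch (helper name and signature are illustrative, not the actual implementation):

```typescript
// Illustrative helper: convert absolute pixel corners [x1, y1, ..., x4, y4]
// into 0-1 floats by dividing x values by the width and y values by the height.
const normalizeBox = (corners: number[], width: number, height: number): number[] =>
  corners.map((value, index) => (index % 2 === 0 ? value / width : value / height));

// Example: normalizeBox([400, 300, 800, 300, 800, 600, 400, 600], 4000, 3000)
//   -> [0.1, 0.1, 0.2, 0.1, 0.2, 0.2, 0.1, 0.2]
```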

* match column order in db

* update admin ui settings descriptions

fix max resolution key

set min threshold to 0.1

fix bind

* apply config correctly, adjust defaults

* unnecessary model type

* unnecessary sources

* fix(ocr): switch RapidOCR lang type from LangDet to LangRec

* fix(ocr): expose lang_type (LangRec.CH) and font_path on OcrOptions for RapidOCR

* fix(ocr): make OCR text search case- and accent-insensitive using ILIKE + unaccent
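
Roughly, that combination pairs ILIKE (case-insensitive matching) with Postgres's unaccent() on both sides of the comparison; a sketch in Kysely terms (the ocr_search table and its columns appear in this commit's tests, everything else here is illustrative and assumes the unaccent extension is installed):

```typescript
import { Kysely, sql } from 'kysely';
import { DB } from 'src/schema';

// Sketch only: ILIKE handles case-insensitivity, unaccent() strips diacritics
// from both the stored text and the query before comparing.
const searchOcrText = (db: Kysely<DB>, query: string) =>
  db
    .selectFrom('ocr_search')
    .select(['assetId', 'text'])
    .where(sql<boolean>`unaccent(text) ILIKE unaccent(${'%' + query + '%'})`)
    .execute();
```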

* fix(ocr): add OCR search fields

* fix: Add OCR database migration and update ML prediction logic.

* trigrams are already case insensitive

* add tests

* format

* update migrations

* wrong uuid function

* linting

* maybe fix medium tests

* formatting

* fix weblate check

* openapi

* sql

* minor fixes

* maybe fix medium tests part 2

* passing medium tests

* format web

* readd sql

* format dart

* disabled in e2e

* chore: translation ordering

---------

Co-authored-by: mertalev <101130780+mertalev@users.noreply.github.com>
Co-authored-by: Alex Tran <alex.tran1502@gmail.com>
Author: Kang
Date: 2025-10-27 22:09:55 +08:00
Committed by: GitHub
Parent: c666dc6c67
Commit: 02b29046b3
90 changed files with 3610 additions and 1722 deletions


@@ -27,8 +27,10 @@ import { EmailRepository } from 'src/repositories/email.repository';
import { EventRepository } from 'src/repositories/event.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { MemoryRepository } from 'src/repositories/memory.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { SearchRepository } from 'src/repositories/search.repository';
@@ -47,6 +49,7 @@ import { VersionHistoryRepository } from 'src/repositories/version-history.repos
import { DB } from 'src/schema';
import { AlbumTable } from 'src/schema/tables/album.table';
import { AssetExifTable } from 'src/schema/tables/asset-exif.table';
import { AssetFileTable } from 'src/schema/tables/asset-file.table';
import { AssetJobStatusTable } from 'src/schema/tables/asset-job-status.table';
import { AssetTable } from 'src/schema/tables/asset.table';
import { FaceSearchTable } from 'src/schema/tables/face-search.table';
@@ -169,6 +172,11 @@ export class MediumTestContext<S extends BaseService = BaseService> {
    return { asset, result };
  }

  async newAssetFile(dto: Insertable<AssetFileTable>) {
    const result = await this.get(AssetRepository).upsertFile(dto);
    return { result };
  }

  async newAssetFace(dto: Partial<Insertable<AssetFace>> & { assetId: string }) {
    const assetFace = mediumFactory.assetFaceInsert(dto);
    const result = await this.get(PersonRepository).createAssetFace(assetFace);
@@ -307,6 +315,7 @@ const newRealRepository = <T>(key: ClassConstructor<T>, db: Kysely<DB>): T => {
    case AssetJobRepository:
    case MemoryRepository:
    case NotificationRepository:
    case OcrRepository:
    case PartnerRepository:
    case PersonRepository:
    case SearchRepository:
@@ -359,6 +368,7 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
    case CryptoRepository:
    case MemoryRepository:
    case NotificationRepository:
    case OcrRepository:
    case PartnerRepository:
    case PersonRepository:
    case SessionRepository:
@@ -407,6 +417,10 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
      return automock(LoggingRepository, { args: [undefined, configMock], strict: false });
    }
    case MachineLearningRepository: {
      return automock(MachineLearningRepository, { args: [{ setContext: () => {} }] });
    }
    case StorageRepository: {
      return automock(StorageRepository, { args: [{ setContext: () => {} }] });
    }


@@ -0,0 +1,243 @@
import { Kysely } from 'kysely';
import { AssetFileType, JobStatus } from 'src/enum';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { MachineLearningRepository } from 'src/repositories/machine-learning.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { SystemMetadataRepository } from 'src/repositories/system-metadata.repository';
import { DB } from 'src/schema';
import { OcrService } from 'src/services/ocr.service';
import { newMediumService } from 'test/medium.factory';
import { getKyselyDB } from 'test/utils';

let defaultDatabase: Kysely<DB>;

const setup = (db?: Kysely<DB>) => {
  return newMediumService(OcrService, {
    database: db || defaultDatabase,
    real: [AssetRepository, AssetJobRepository, ConfigRepository, OcrRepository, SystemMetadataRepository],
    mock: [JobRepository, LoggingRepository, MachineLearningRepository],
  });
};

beforeAll(async () => {
  defaultDatabase = await getKyselyDB();
});

describe(OcrService.name, () => {
  it('should work', () => {
    const { sut } = setup();
    expect(sut).toBeDefined();
  });

  it('should parse asset', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    machineLearningMock.ocr.mockResolvedValue({
      box: [10, 10, 50, 10, 50, 50, 10, 50],
      boxScore: [0.99],
      text: ['Test OCR'],
      textScore: [0.95],
    });

    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
      {
        assetId: asset.id,
        boxScore: 0.99,
        id: expect.any(String),
        text: 'Test OCR',
        textScore: 0.95,
        x1: 10,
        y1: 10,
        x2: 50,
        y2: 10,
        x3: 50,
        y3: 50,
        x4: 10,
        y4: 50,
      },
    ]);

    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toEqual({
      assetId: asset.id,
      text: 'Test OCR',
    });

    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });

  it('should handle multiple boxes', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    machineLearningMock.ocr.mockResolvedValue({
      box: Array.from({ length: 8 * 5 }, (_, i) => i),
      boxScore: [0.7, 0.67, 0.65, 0.62, 0.6],
      text: ['One', 'Two', 'Three', 'Four', 'Five'],
      textScore: [0.9, 0.89, 0.88, 0.87, 0.86],
    });

    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([
      {
        assetId: asset.id,
        boxScore: 0.7,
        id: expect.any(String),
        text: 'One',
        textScore: 0.9,
        x1: 0,
        y1: 1,
        x2: 2,
        y2: 3,
        x3: 4,
        y3: 5,
        x4: 6,
        y4: 7,
      },
      {
        assetId: asset.id,
        boxScore: 0.67,
        id: expect.any(String),
        text: 'Two',
        textScore: 0.89,
        x1: 8,
        y1: 9,
        x2: 10,
        y2: 11,
        x3: 12,
        y3: 13,
        x4: 14,
        y4: 15,
      },
      {
        assetId: asset.id,
        boxScore: 0.65,
        id: expect.any(String),
        text: 'Three',
        textScore: 0.88,
        x1: 16,
        y1: 17,
        x2: 18,
        y2: 19,
        x3: 20,
        y3: 21,
        x4: 22,
        y4: 23,
      },
      {
        assetId: asset.id,
        boxScore: 0.62,
        id: expect.any(String),
        text: 'Four',
        textScore: 0.87,
        x1: 24,
        y1: 25,
        x2: 26,
        y2: 27,
        x3: 28,
        y3: 29,
        x4: 30,
        y4: 31,
      },
      {
        assetId: asset.id,
        boxScore: 0.6,
        id: expect.any(String),
        text: 'Five',
        textScore: 0.86,
        x1: 32,
        y1: 33,
        x2: 34,
        y2: 35,
        x3: 36,
        y3: 37,
        x4: 38,
        y4: 39,
      },
    ]);

    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toEqual({
      assetId: asset.id,
      text: 'One Two Three Four Five',
    });

    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });

  it('should handle no boxes', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });

    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);

    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toBeUndefined();

    await expect(
      ctx.database
        .selectFrom('asset_job_status')
        .select('asset_job_status.ocrAt')
        .where('assetId', '=', asset.id)
        .executeTakeFirst(),
    ).resolves.toEqual({ ocrAt: expect.any(Date) });
  });

  it('should update existing results', async () => {
    const { sut, ctx } = setup();
    const { user } = await ctx.newUser();
    const { asset } = await ctx.newAsset({ ownerId: user.id });
    await ctx.newAssetFile({ assetId: asset.id, type: AssetFileType.Preview, path: 'preview.jpg' });
    const machineLearningMock = ctx.getMock(MachineLearningRepository);
    machineLearningMock.ocr.mockResolvedValue({
      box: [10, 10, 50, 10, 50, 50, 10, 50],
      boxScore: [0.99],
      text: ['Test OCR'],
      textScore: [0.95],
    });

    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    machineLearningMock.ocr.mockResolvedValue({ box: [], boxScore: [], text: [], textScore: [] });
    await expect(sut.handleOcr({ id: asset.id })).resolves.toBe(JobStatus.Success);

    const ocrRepository = ctx.get(OcrRepository);
    await expect(ocrRepository.getByAssetId(asset.id)).resolves.toEqual([]);

    await expect(
      ctx.database.selectFrom('ocr_search').selectAll().where('assetId', '=', asset.id).executeTakeFirst(),
    ).resolves.toBeUndefined();
  });
});


@@ -41,6 +41,7 @@ import { MetadataRepository } from 'src/repositories/metadata.repository';
import { MoveRepository } from 'src/repositories/move.repository';
import { NotificationRepository } from 'src/repositories/notification.repository';
import { OAuthRepository } from 'src/repositories/oauth.repository';
import { OcrRepository } from 'src/repositories/ocr.repository';
import { PartnerRepository } from 'src/repositories/partner.repository';
import { PersonRepository } from 'src/repositories/person.repository';
import { ProcessRepository } from 'src/repositories/process.repository';
@@ -230,6 +231,7 @@ export type ServiceOverrides = {
  metadata: MetadataRepository;
  move: MoveRepository;
  notification: NotificationRepository;
  ocr: OcrRepository;
  oauth: OAuthRepository;
  partner: PartnerRepository;
  person: PersonRepository;
@@ -302,6 +304,7 @@ export const newTestService = <T extends BaseService>(
    metadata: newMetadataRepositoryMock(),
    move: automock(MoveRepository, { strict: false }),
    notification: automock(NotificationRepository),
    ocr: automock(OcrRepository, { strict: false }),
    oauth: automock(OAuthRepository, { args: [loggerMock] }),
    partner: automock(PartnerRepository, { strict: false }),
    person: automock(PersonRepository, { strict: false }),
@@ -357,6 +360,7 @@ export const newTestService = <T extends BaseService>(
    overrides.move || (mocks.move as As<MoveRepository>),
    overrides.notification || (mocks.notification as As<NotificationRepository>),
    overrides.oauth || (mocks.oauth as As<OAuthRepository>),
    overrides.ocr || (mocks.ocr as As<OcrRepository>),
    overrides.partner || (mocks.partner as As<PartnerRepository>),
    overrides.person || (mocks.person as As<PersonRepository>),
    overrides.process || (mocks.process as As<ProcessRepository>),