Compare commits


1 Commit

Author: shenlong-tanwen
SHA1: f0fcc3a0e3
Message: base job implementation
Date: 2025-10-22 21:53:51 +05:30
1670 changed files with 34596 additions and 145650 deletions


@@ -29,12 +29,6 @@
]
}
},
"features": {
"ghcr.io/devcontainers/features/docker-in-docker:2": {
// https://github.com/devcontainers/features/issues/1466
"moby": false
}
},
"forwardPorts": [3000, 9231, 9230, 2283],
"portsAttributes": {
"3000": {


@@ -21,7 +21,6 @@ services:
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
- ../plugins:/build/corePlugin
immich-web:
env_file: !reset []
immich-machine-learning:

.github/.nvmrc vendored

@@ -1 +1 @@
24.13.0
22.20.0

.github/labeler.yml vendored

@@ -31,7 +31,7 @@ documentation:
🧠machine-learning:
- changed-files:
- any-glob-to-any-file:
- machine-learning/**
- machine-learning/app/**
changelog:translation:
- head-branch: ['^chore/translations$']

.github/mise.toml vendored

@@ -1,10 +0,0 @@
[tasks.install]
run = "pnpm install --filter github --frozen-lockfile"
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."


@@ -4,6 +4,6 @@
"format:fix": "prettier --write ."
},
"devDependencies": {
"prettier": "^3.7.4"
"prettier": "^3.5.3"
}
}


@@ -1,16 +1,12 @@
name: Build Mobile
on:
workflow_dispatch:
workflow_call:
inputs:
ref:
required: false
type: string
environment:
description: 'Target environment'
required: true
default: 'development'
type: string
secrets:
KEY_JKS:
required: true
@@ -20,18 +16,6 @@ on:
required: true
ANDROID_STORE_PASSWORD:
required: true
APP_STORE_CONNECT_API_KEY_ID:
required: true
APP_STORE_CONNECT_API_KEY_ISSUER_ID:
required: true
APP_STORE_CONNECT_API_KEY:
required: true
IOS_CERTIFICATE_P12:
required: true
IOS_CERTIFICATE_PASSWORD:
required: true
FASTLANE_TEAM_ID:
required: true
pull_request:
push:
branches: [main]
@@ -84,7 +68,7 @@ jobs:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
@@ -96,14 +80,14 @@ jobs:
working-directory: ./mobile
run: printf "%s" $KEY_JKS | base64 -d > android/key.jks
- uses: actions/setup-java@f2beeb24e141e01a676f977032f5a29d81c9e27e # v5.1.0
- uses: actions/setup-java@dded0888837ed1f317902acf8a20df0ad188d165 # v5.0.0
with:
distribution: 'zulu'
java-version: '17'
- name: Restore Gradle Cache
id: cache-gradle-restore
uses: actions/cache/restore@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: actions/cache/restore@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
with:
path: |
~/.gradle/caches
@@ -153,14 +137,14 @@ jobs:
fi
- name: Publish Android Artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: release-apk-signed
path: mobile/build/app/outputs/flutter-apk/*.apk
- name: Save Gradle Cache
id: cache-gradle-save
uses: actions/cache/save@9255dc7a253b0ccc959486e2bca901246202afeb # v5.0.1
uses: actions/cache/save@0057852bfaa89a56745cba8c7296529d2fc39830 # v4.3.0
if: github.ref == 'refs/heads/main'
with:
path: |
@@ -170,123 +154,3 @@ jobs:
mobile/android/.gradle
mobile/.dart_tool
key: ${{ steps.cache-gradle-restore.outputs.cache-primary-key }}
build-sign-ios:
name: Build and sign iOS
needs: pre-job
permissions:
contents: read
# Run on main branch or workflow_dispatch, or on PRs/other branches (build only, no upload)
if: ${{ !github.event.pull_request.head.repo.fork && fromJSON(needs.pre-job.outputs.should_run).mobile == true }}
runs-on: macos-latest
steps:
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6
with:
ref: ${{ inputs.ref || github.sha }}
persist-credentials: false
- name: Setup Flutter SDK
uses: subosito/flutter-action@fd55f4c5af5b953cc57a2be44cb082c8f6635e8e # v2
with:
channel: 'stable'
flutter-version-file: ./mobile/pubspec.yaml
cache: true
- name: Install Flutter dependencies
working-directory: ./mobile
run: flutter pub get
- name: Generate translation files
run: dart run easy_localization:generate -S ../i18n && dart run bin/generate_keys.dart
working-directory: ./mobile
- name: Generate platform APIs
run: make pigeon
working-directory: ./mobile
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: '3.3'
bundler-cache: true
working-directory: ./mobile/ios
- name: Install CocoaPods dependencies
working-directory: ./mobile/ios
run: |
pod install
- name: Create API Key
env:
API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
API_KEY_CONTENT: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
working-directory: ./mobile/ios
run: |
mkdir -p ~/.appstoreconnect/private_keys
echo "$API_KEY_CONTENT" | base64 --decode > ~/.appstoreconnect/private_keys/AuthKey_${API_KEY_ID}.p8
- name: Import Certificate
env:
IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
working-directory: ./mobile/ios
run: |
# Decode certificate
echo "$IOS_CERTIFICATE_P12" | base64 --decode > certificate.p12
- name: Create keychain and import certificate
env:
KEYCHAIN_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
working-directory: ./mobile/ios
run: |
# Create keychain
security create-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
security default-keychain -s build.keychain
security unlock-keychain -p "$KEYCHAIN_PASSWORD" build.keychain
security set-keychain-settings -t 3600 -u build.keychain
# Import certificate
security import certificate.p12 -k build.keychain -P "$CERTIFICATE_PASSWORD" -T /usr/bin/codesign -T /usr/bin/security
security set-key-partition-list -S apple-tool:,apple: -s -k "$KEYCHAIN_PASSWORD" build.keychain
# Verify certificate was imported
security find-identity -v -p codesigning build.keychain
- name: Build and deploy to TestFlight
env:
FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}
IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
KEYCHAIN_NAME: build.keychain
KEYCHAIN_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
ENVIRONMENT: ${{ inputs.environment || 'development' }}
BUNDLE_ID_SUFFIX: ${{ inputs.environment == 'production' && '' || 'development' }}
GITHUB_REF: ${{ github.ref }}
working-directory: ./mobile/ios
run: |
# Only upload to TestFlight on main branch
if [[ "$GITHUB_REF" == "refs/heads/main" ]]; then
if [[ "$ENVIRONMENT" == "development" ]]; then
bundle exec fastlane gha_testflight_dev
else
bundle exec fastlane gha_release_prod
fi
else
# Build only, no TestFlight upload for non-main branches
bundle exec fastlane gha_build_only
fi
- name: Clean up keychain
if: always()
run: |
security delete-keychain build.keychain || true
- name: Upload IPA artifact
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
with:
name: ios-release-ipa
path: mobile/ios/Runner.ipa


@@ -25,7 +25,7 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Check out code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}


@@ -35,7 +35,7 @@ jobs:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
@@ -44,7 +44,7 @@ jobs:
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './cli/.nvmrc'
registry-url: 'https://registry.npmjs.org'
@@ -78,16 +78,16 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Set up QEMU
uses: docker/setup-qemu-action@c7c53464625b32c7a7e944ae62b3e17d2b600130 # v3.7.0
uses: docker/setup-qemu-action@29109295f81e9208d7d86ff1c6c12d2833863392 # v3.6.0
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@8d2750c68a42422c14e847fe6c8ac0403b4cbd6f # v3.12.0
uses: docker/setup-buildx-action@e468171a9de216ec08956ac3ada2f0791b6bd435 # v3.11.1
- name: Login to GitHub Container Registry
uses: docker/login-action@5e57cd118135c172c3672efd75eb46360885c0ef # v3.6.0
@@ -95,7 +95,7 @@ jobs:
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}
password: ${{ steps.token.outputs.token }}
- name: Get package version
id: package-version
@@ -105,7 +105,7 @@ jobs:
- name: Generate docker image tags
id: metadata
uses: docker/metadata-action@c299e40c65443455700f0fdfc63efafe5b349051 # v5.10.0
uses: docker/metadata-action@c1e51972afc2121e065aed6d45c65596fe445f3f # v5.8.0
with:
flavor: |
latest=false
@@ -125,3 +125,4 @@ jobs:
cache-to: type=gha,mode=max
tags: ${{ steps.metadata.outputs.tags }}
labels: ${{ steps.metadata.outputs.labels }}
github-token: ${{ steps.token.outputs.token }}


@@ -35,7 +35,7 @@ jobs:
needs: [get_body, should_run]
if: ${{ needs.should_run.outputs.should_run == 'true' }}
container:
image: ghcr.io/immich-app/mdq:main@sha256:ab9f163cd5d5cec42704a26ca2769ecf3f10aa8e7bae847f1d527cdf075946e6
image: ghcr.io/immich-app/mdq:main@sha256:6b8450bfc06770af1af66bce9bf2ced7d1d9b90df1a59fc4c83a17777a9f6723
outputs:
checked: ${{ steps.get_checkbox.outputs.checked }}
steps:
@@ -54,10 +54,16 @@ jobs:
issues: write
discussions: write
steps:
- id: token
uses: immich-app/devtools/actions/create-workflow-token@da177fa133657503ddb7503f8ba53dccefec5da1 # create-workflow-token-action-v1.0.0
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Close issue
if: ${{ github.event_name == 'issues' }}
env:
GH_TOKEN: ${{ github.token }}
GH_TOKEN: ${{ steps.token.outputs.token }}
NODE_ID: ${{ github.event.issue.node_id }}
run: |
gh api graphql \
@@ -83,7 +89,7 @@ jobs:
- name: Close discussion
if: ${{ github.event_name == 'discussion' && github.event.discussion.category.name == 'Feature Request' }}
env:
GH_TOKEN: ${{ github.token }}
GH_TOKEN: ${{ steps.token.outputs.token }}
NODE_ID: ${{ github.event.discussion.node_id }}
run: |
gh api graphql \


@@ -50,14 +50,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout repository
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/init@755f44910c12a3d7ca0d8c6e42c048b3362f7cec # v3.30.8
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/autobuild@755f44910c12a3d7ca0d8c6e42c048b3362f7cec # v3.30.8
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@5d4e8d1aca955e8d8589aabd499c5cae939e33c7 # v4.31.9
uses: github/codeql-action/analyze@755f44910c12a3d7ca0d8c6e42c048b3362f7cec # v3.30.8
with:
category: '/language:${{matrix.language}}'


@@ -116,23 +116,24 @@ jobs:
matrix:
include:
- device: cpu
tag-suffix: ''
- device: cuda
suffixes: '-cuda'
tag-suffix: '-cuda'
platforms: linux/amd64
- device: openvino
suffixes: '-openvino'
tag-suffix: '-openvino'
platforms: linux/amd64
- device: armnn
suffixes: '-armnn'
tag-suffix: '-armnn'
platforms: linux/arm64
- device: rknn
suffixes: '-rknn'
tag-suffix: '-rknn'
platforms: linux/arm64
- device: rocm
suffixes: '-rocm'
tag-suffix: '-rocm'
platforms: linux/amd64
runner-mapping: '{"linux/amd64": "mich"}'
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@0477486d82313fba68f7c82c034120a4b8981297 # multi-runner-build-workflow-v2.1.0
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
permissions:
contents: read
actions: read
@@ -146,7 +147,7 @@ jobs:
dockerfile: machine-learning/Dockerfile
platforms: ${{ matrix.platforms }}
runner-mapping: ${{ matrix.runner-mapping }}
suffixes: ${{ matrix.suffixes }}
tag-suffix: ${{ matrix.tag-suffix }}
dockerhub-push: ${{ github.event_name == 'release' }}
build-args: |
DEVICE=${{ matrix.device }}
@@ -155,7 +156,7 @@ jobs:
name: Build and Push Server
needs: pre-job
if: ${{ fromJSON(needs.pre-job.outputs.should_run).server == true }}
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@0477486d82313fba68f7c82c034120a4b8981297 # multi-runner-build-workflow-v2.1.0
uses: immich-app/devtools/.github/workflows/multi-runner-build.yml@946acac326940f8badf09ccf591d9cb345d6a689 # multi-runner-build-workflow-v0.2.1
permissions:
contents: read
actions: read


@@ -60,17 +60,16 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
fetch-depth: 0
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './docs/.nvmrc'
cache: 'pnpm'
@@ -86,7 +85,7 @@ jobs:
run: pnpm build
- name: Upload build output
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
with:
name: docs-build-output
path: docs/build/


@@ -5,9 +5,6 @@ on:
types:
- completed
env:
TG_NON_INTERACTIVE: 'true'
jobs:
checks:
name: Docs Deploy Checks
@@ -125,13 +122,13 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup Mise
uses: immich-app/devtools/actions/use-mise@b868e6e7c8cc212beec876330b4059e661ee44bb # use-mise-action-v1.1.1
uses: immich-app/devtools/actions/use-mise@cd24790a7f5f6439ac32cc94f5523cb2de8bfa8c # use-mise-action-v1.1.0
- name: Load parameters
id: parameters
@@ -174,7 +171,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: 'mise run //deployment:tf apply'
run: 'mise run tf apply'
- name: Deploy Docs Subdomain Output
id: docs-output
@@ -185,11 +182,15 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: 'mise run tf output -json'
- name: Output Cleaning
id: clean
env:
TG_OUTPUT: ${{ steps.docs-output.outputs.tg_action_output }}
run: |
mise run //deployment:tf output -- -json | jq -r '
"projectName=\(.pages_project_name.value)",
"subdomain=\(.immich_app_branch_subdomain.value)"
' >> $GITHUB_OUTPUT
CLEANED=$(echo "$TG_OUTPUT" | sed 's|%0A|\n|g ; s|%3C|<|g' | jq -c .)
echo "output=$CLEANED" >> $GITHUB_OUTPUT
- name: Publish to Cloudflare Pages
# TODO: Action is deprecated
@@ -197,7 +198,7 @@ jobs:
with:
apiToken: ${{ secrets.CLOUDFLARE_API_TOKEN_PAGES_UPLOAD }}
accountId: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
projectName: ${{ steps.docs-output.outputs.projectName }}
projectName: ${{ fromJson(steps.clean.outputs.output).pages_project_name.value }}
workingDirectory: 'docs'
directory: 'build'
branch: ${{ steps.parameters.outputs.name }}
@@ -211,7 +212,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs-release'
run: 'mise run //deployment:tf apply'
run: 'mise run tf apply'
- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0
@@ -220,6 +221,6 @@ jobs:
token: ${{ steps.token.outputs.token }}
number: ${{ fromJson(needs.checks.outputs.parameters).pr_number }}
body: |
📖 Documentation deployed to [${{ steps.docs-output.outputs.subdomain }}](https://${{ steps.docs-output.outputs.subdomain }})
📖 Documentation deployed to [${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }}](https://${{ fromJson(steps.clean.outputs.output).immich_app_branch_subdomain.value }})
emojis: 'rocket'
body-include: '<!-- Docs PR URL -->'


@@ -5,9 +5,6 @@ on:
permissions: {}
env:
TG_NON_INTERACTIVE: 'true'
jobs:
deploy:
name: Docs Destroy
@@ -23,13 +20,13 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup Mise
uses: immich-app/devtools/actions/use-mise@b868e6e7c8cc212beec876330b4059e661ee44bb # use-mise-action-v1.1.1
uses: immich-app/devtools/actions/use-mise@cd24790a7f5f6439ac32cc94f5523cb2de8bfa8c # use-mise-action-v1.1.0
- name: Destroy Docs Subdomain
env:
@@ -39,7 +36,7 @@ jobs:
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
TF_STATE_POSTGRES_CONN_STR: ${{ secrets.TF_STATE_POSTGRES_CONN_STR }}
working-directory: 'deployment/modules/cloudflare/docs'
run: 'mise run //deployment:tf destroy -- -refresh=false'
run: 'mise run tf destroy -refresh=false'
- name: Comment
uses: actions-cool/maintain-one-comment@4b2dbf086015f892dcb5e8c1106f5fccd6c1476b # v3.2.0


@@ -16,13 +16,13 @@ jobs:
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: 'Checkout'
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
ref: ${{ github.event.pull_request.head.ref }}
token: ${{ steps.generate-token.outputs.token }}
@@ -32,14 +32,14 @@ jobs:
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Fix formatting
run: pnpm --recursive install && pnpm run --recursive --parallel fix:format
run: make install-all && make format-all
- name: Commit and push
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4


@@ -31,7 +31,7 @@ jobs:
- name: Generate a token
id: generate_token
if: ${{ inputs.skip != true }}
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}


@@ -45,31 +45,30 @@ jobs:
needs: [merge_translations]
outputs:
ref: ${{ steps.push-tag.outputs.commit_long_sha }}
version: ${{ steps.output.outputs.version }}
permissions: {} # No job-level permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
ref: main
- name: Install uv
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
@@ -81,16 +80,13 @@ jobs:
MOBILE_BUMP: ${{ inputs.mobileBump }}
run: misc/release/pump-version.sh -s "${SERVER_BUMP}" -m "${MOBILE_BUMP}"
- id: output
run: echo "version=$IMMICH_VERSION" >> $GITHUB_OUTPUT
- name: Commit and tag
id: push-tag
uses: EndBug/add-and-commit@a94899bca583c204427a224a7af87c02f9b325d5 # v9.1.4
with:
default_author: github_actions
message: 'chore: version ${{ steps.output.outputs.version }}'
tag: ${{ steps.output.outputs.version }}
message: 'chore: version ${{ env.IMMICH_VERSION }}'
tag: ${{ env.IMMICH_VERSION }}
push: true
build_mobile:
@@ -103,55 +99,40 @@ jobs:
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
# iOS secrets
APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}
with:
ref: ${{ needs.bump_version.outputs.ref }}
environment: production
prepare_release:
runs-on: ubuntu-latest
needs: [build_mobile, bump_version]
needs: build_mobile
permissions:
actions: read # To download the app artifact
# No content permissions are needed because it uses the app-token
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: false
- name: Download APK
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
uses: actions/download-artifact@634f93cb2916e3fdff6788551b99b062d0335ce0 # v5.0.0
with:
name: release-apk-signed
github-token: ${{ steps.generate-token.outputs.token }}
- name: Create draft release
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
uses: softprops/action-gh-release@6da8fa9354ddfdc4aeace5fc48d7f679b5214090 # v2.4.1
with:
draft: true
tag_name: ${{ needs.bump_version.outputs.version }}
tag_name: ${{ env.IMMICH_VERSION }}
token: ${{ steps.generate-token.outputs.token }}
generate_release_notes: true
body_path: misc/release/notes.tmpl


@@ -1,170 +0,0 @@
name: Manage release PR
on:
workflow_dispatch:
push:
branches:
- main
concurrency:
group: ${{ github.workflow }}
cancel-in-progress: true
permissions: {}
jobs:
bump:
runs-on: ubuntu-latest
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: true
ref: main
- name: Install uv
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Determine release type
id: bump-type
uses: ietf-tools/semver-action@c90370b2958652d71c06a3484129a4d423a6d8a8 # v1.11.0
with:
token: ${{ steps.generate-token.outputs.token }}
- name: Bump versions
env:
TYPE: ${{ steps.bump-type.outputs.bump }}
run: |
if [ "$TYPE" == "none" ]; then
exit 1 # TODO: Is there a cleaner way to abort the workflow?
fi
misc/release/pump-version.sh -s $TYPE -m true
- name: Manage Outline release document
id: outline
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
env:
OUTLINE_API_KEY: ${{ secrets.OUTLINE_API_KEY }}
NEXT_VERSION: ${{ steps.bump-type.outputs.next }}
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
const fs = require('fs');
const outlineKey = process.env.OUTLINE_API_KEY;
const parentDocumentId = 'da856355-0844-43df-bd71-f8edce5382d9'
const collectionId = 'e2910656-714c-4871-8721-447d9353bd73';
const baseUrl = 'https://outline.immich.cloud';
const listResponse = await fetch(`${baseUrl}/api/documents.list`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({ parentDocumentId })
});
if (!listResponse.ok) {
throw new Error(`Outline list failed: ${listResponse.statusText}`);
}
const listData = await listResponse.json();
const allDocuments = listData.data || [];
const document = allDocuments.find(doc => doc.title === 'next');
let documentId;
let documentUrl;
let documentText;
if (!document) {
// Create new document
console.log('No existing document found. Creating new one...');
const notesTmpl = fs.readFileSync('misc/release/notes.tmpl', 'utf8');
const createResponse = await fetch(`${baseUrl}/api/documents.create`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
title: 'next',
text: notesTmpl,
collectionId: collectionId,
parentDocumentId: parentDocumentId,
publish: true
})
});
if (!createResponse.ok) {
throw new Error(`Failed to create document: ${createResponse.statusText}`);
}
const createData = await createResponse.json();
documentId = createData.data.id;
const urlId = createData.data.urlId;
documentUrl = `${baseUrl}/doc/next-${urlId}`;
documentText = createData.data.text || '';
console.log(`Created new document: ${documentUrl}`);
} else {
documentId = document.id;
const docPath = document.url;
documentUrl = `${baseUrl}${docPath}`;
documentText = document.text || '';
console.log(`Found existing document: ${documentUrl}`);
}
// Generate GitHub release notes
console.log('Generating GitHub release notes...');
const releaseNotesResponse = await github.rest.repos.generateReleaseNotes({
owner: context.repo.owner,
repo: context.repo.repo,
tag_name: `${process.env.NEXT_VERSION}`,
});
// Combine the content
const changelog = `
# ${process.env.NEXT_VERSION}
${documentText}
${releaseNotesResponse.data.body}
---
`
const existingChangelog = fs.existsSync('CHANGELOG.md') ? fs.readFileSync('CHANGELOG.md', 'utf8') : '';
fs.writeFileSync('CHANGELOG.md', changelog + existingChangelog, 'utf8');
core.setOutput('document_url', documentUrl);
- name: Create PR
id: create-pr
uses: peter-evans/create-pull-request@98357b18bf14b5342f975ff684046ec3b2a07725 # v8.0.0
with:
token: ${{ steps.generate-token.outputs.token }}
commit-message: 'chore: release ${{ steps.bump-type.outputs.next }}'
title: 'chore: release ${{ steps.bump-type.outputs.next }}'
body: 'Release notes: ${{ steps.outline.outputs.document_url }}'
labels: 'changelog:skip'
branch: 'release/next'
draft: true


@@ -1,148 +0,0 @@
name: release.yml
on:
pull_request:
types: [closed]
paths:
- CHANGELOG.md
jobs:
# Maybe double check PR source branch?
merge_translations:
uses: ./.github/workflows/merge-translations.yml
permissions:
pull-requests: write
secrets:
PUSH_O_MATIC_APP_ID: ${{ secrets.PUSH_O_MATIC_APP_ID }}
PUSH_O_MATIC_APP_KEY: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
WEBLATE_TOKEN: ${{ secrets.WEBLATE_TOKEN }}
build_mobile:
uses: ./.github/workflows/build-mobile.yml
needs: merge_translations
permissions:
contents: read
secrets:
KEY_JKS: ${{ secrets.KEY_JKS }}
ALIAS: ${{ secrets.ALIAS }}
ANDROID_KEY_PASSWORD: ${{ secrets.ANDROID_KEY_PASSWORD }}
ANDROID_STORE_PASSWORD: ${{ secrets.ANDROID_STORE_PASSWORD }}
# iOS secrets
APP_STORE_CONNECT_API_KEY_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ID }}
APP_STORE_CONNECT_API_KEY_ISSUER_ID: ${{ secrets.APP_STORE_CONNECT_API_KEY_ISSUER_ID }}
APP_STORE_CONNECT_API_KEY: ${{ secrets.APP_STORE_CONNECT_API_KEY }}
IOS_CERTIFICATE_P12: ${{ secrets.IOS_CERTIFICATE_P12 }}
IOS_CERTIFICATE_PASSWORD: ${{ secrets.IOS_CERTIFICATE_PASSWORD }}
IOS_PROVISIONING_PROFILE: ${{ secrets.IOS_PROVISIONING_PROFILE }}
IOS_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_SHARE_EXTENSION }}
IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION: ${{ secrets.IOS_DEVELOPMENT_PROVISIONING_PROFILE_WIDGET_EXTENSION }}
FASTLANE_TEAM_ID: ${{ secrets.FASTLANE_TEAM_ID }}
with:
ref: main
environment: production
prepare_release:
runs-on: ubuntu-latest
needs: build_mobile
permissions:
actions: read # To download the app artifact
steps:
- name: Generate a token
id: generate-token
uses: actions/create-github-app-token@29824e69f54612133e76f7eaac726eef6c875baf # v2.2.1
with:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
with:
token: ${{ steps.generate-token.outputs.token }}
persist-credentials: false
ref: main
- name: Extract changelog
id: changelog
run: |
CHANGELOG_PATH=$RUNNER_TEMP/changelog.md
sed -n '1,/^---$/p' CHANGELOG.md | head -n -1 > $CHANGELOG_PATH
echo "path=$CHANGELOG_PATH" >> $GITHUB_OUTPUT
VERSION=$(sed -n 's/^# //p' $CHANGELOG_PATH)
echo "version=$VERSION" >> $GITHUB_OUTPUT
- name: Download APK
uses: actions/download-artifact@37930b1c2abaa49bbe596cd826c3c89aef350131 # v7.0.0
with:
name: release-apk-signed
github-token: ${{ steps.generate-token.outputs.token }}
- name: Create draft release
uses: softprops/action-gh-release@a06a81a03ee405af7f2048a818ed3f03bbf83c7b # v2.5.0
with:
tag_name: ${{ steps.version.outputs.result }}
token: ${{ steps.generate-token.outputs.token }}
body_path: ${{ steps.changelog.outputs.path }}
draft: true
files: |
docker/docker-compose.yml
docker/example.env
docker/hwaccel.ml.yml
docker/hwaccel.transcoding.yml
docker/prometheus.yml
*.apk
- name: Rename Outline document
uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
continue-on-error: true
env:
OUTLINE_API_KEY: ${{ secrets.OUTLINE_API_KEY }}
VERSION: ${{ steps.changelog.outputs.version }}
with:
github-token: ${{ steps.generate-token.outputs.token }}
script: |
const outlineKey = process.env.OUTLINE_API_KEY;
const version = process.env.VERSION;
const parentDocumentId = 'da856355-0844-43df-bd71-f8edce5382d9';
const baseUrl = 'https://outline.immich.cloud';
const listResponse = await fetch(`${baseUrl}/api/documents.list`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({ parentDocumentId })
});
if (!listResponse.ok) {
throw new Error(`Outline list failed: ${listResponse.statusText}`);
}
const listData = await listResponse.json();
const allDocuments = listData.data || [];
const document = allDocuments.find(doc => doc.title === 'next');
if (document) {
console.log(`Found document 'next', renaming to '${version}'...`);
const updateResponse = await fetch(`${baseUrl}/api/documents.update`, {
method: 'POST',
headers: {
'Authorization': `Bearer ${outlineKey}`,
'Content-Type': 'application/json'
},
body: JSON.stringify({
id: document.id,
title: version
})
});
if (!updateResponse.ok) {
throw new Error(`Failed to rename document: ${updateResponse.statusText}`);
}
} else {
console.log('No document titled "next" found to rename');
}


@@ -22,7 +22,7 @@ jobs:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
@@ -31,7 +31,7 @@ jobs:
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
# Setup .npmrc file to publish to npm
- uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
- uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './open-api/typescript-sdk/.nvmrc'
registry-url: 'https://registry.npmjs.org'


@@ -55,7 +55,7 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}


@@ -69,7 +69,7 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
@@ -77,7 +77,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
@@ -114,14 +114,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
@@ -161,14 +161,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './cli/.nvmrc'
cache: 'pnpm'
@@ -203,14 +203,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
@@ -247,14 +247,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
@@ -285,22 +285,22 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './web/.nvmrc'
cache: 'pnpm'
cache-dependency-path: '**/pnpm-lock.yaml'
- name: Install dependencies
run: pnpm --filter=immich-i18n install --frozen-lockfile
run: pnpm --filter=immich-web install --frozen-lockfile
- name: Format
run: pnpm --filter=immich-i18n format:fix
run: pnpm --filter=immich-web format:i18n
- name: Find file changes
uses: tj-actions/verify-changed-files@a1c6acee9df209257a246f2cc6ae8cb6581c1edf # v20.0.4
id: verify-changed-files
@@ -333,14 +333,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
@@ -379,15 +379,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
submodules: 'recursive'
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
@@ -418,7 +417,7 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
submodules: 'recursive'
@@ -426,7 +425,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
@@ -473,7 +472,7 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
submodules: 'recursive'
@@ -481,7 +480,7 @@ jobs:
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './e2e/.nvmrc'
cache: 'pnpm'
@@ -500,16 +499,8 @@ jobs:
run: docker compose build
if: ${{ !cancelled() }}
- name: Run e2e tests (web)
env:
CI: true
run: npx playwright test
if: ${{ !cancelled() }}
- name: Archive test results
uses: actions/upload-artifact@b7c566a772e6b6bfb58ed0dc250532a479d7789f # v6.0.0
if: success() || failure()
with:
name: e2e-web-test-results-${{ matrix.runner }}
path: e2e/playwright-report/
success-check-e2e:
name: End-to-End Tests Success
needs: [e2e-tests-server-cli, e2e-tests-web]
@@ -534,7 +525,7 @@ jobs:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
@@ -566,14 +557,17 @@ jobs:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Install uv
uses: astral-sh/setup-uv@681c641aba71e4a1c380be3ab5e12ad51f415867 # v7.1.6
with:
python-version: 3.11
uses: astral-sh/setup-uv@d0cc045d04ccac9d8b7881df0226f9e82c39688e # v6.8.0
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
# TODO: add caching when supported (https://github.com/actions/setup-python/pull/818)
# with:
# python-version: 3.11
# cache: 'uv'
- name: Install dependencies
run: |
uv sync --extra cpu
@@ -607,14 +601,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './.github/.nvmrc'
cache: 'pnpm'
@@ -636,7 +630,7 @@ jobs:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
- uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
@@ -658,14 +652,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'
@@ -720,14 +714,14 @@ jobs:
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- name: Checkout code
uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 # v6.0.1
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
token: ${{ steps.token.outputs.token }}
- name: Setup pnpm
uses: pnpm/action-setup@41ff72655975bd51cab0327fa583b6e92b6d3061 # v4.2.0
- name: Setup Node
uses: actions/setup-node@395ad3262231945c25e8478fd5baf05154b1d79f # v6.1.0
uses: actions/setup-node@a0853c24544627f65ddf259abe73b1d18a591444 # v5.0.0
with:
node-version-file: './server/.nvmrc'
cache: 'pnpm'


@@ -36,7 +36,8 @@ jobs:
github-token: ${{ steps.token.outputs.token }}
filters: |
i18n:
- modified: 'i18n/!(en)**\.json'
- 'i18n/!(en)**\.json'
exclude-branches: 'chore/translations'
skip-force-logic: 'true'
enforce-lock:


@@ -52,7 +52,7 @@
},
"cSpell.words": ["immich"],
"editor.formatOnSave": true,
"eslint.validate": ["javascript", "typescript", "svelte"],
"eslint.validate": ["javascript", "svelte"],
"explorer.fileNesting.enabled": true,
"explorer.fileNesting.patterns": {
"*.dart": "${capture}.g.dart,${capture}.gr.dart,${capture}.drift.dart",


@@ -1,31 +0,0 @@
# Contributing to Immich
We appreciate every contribution, and we're happy about every new contributor. So please feel invited to help make Immich a better product!
## Getting started
To get you started quickly we have detailed guides for the dev setup on our [website](https://docs.immich.app/developer/setup). If you prefer, you can also use [Devcontainers](https://docs.immich.app/developer/devcontainers).
There are also additional resources about Immich's architecture, database migrations, the use of OpenAPI, and more in our [developer documentation](https://docs.immich.app/developer/architecture).
## General
Please try to keep pull requests as focused as possible. A PR should do exactly one thing and not bleed into other, unrelated areas. The smaller a PR, the fewer changes are likely needed, and the quicker it will likely be merged. For larger/more impactful PRs, please reach out to us first to discuss your plans. The best way to do this is through our [Discord](https://discord.immich.app). We have a dedicated `#contributing` channel there. Additionally, please fill out the entire template when opening a PR.
## Finding work
If you are looking for something to work on, there are discussions and issues with a `good-first-issue` label on them. These are always a good starting point. If none of them sound interesting or fit your skill set, feel free to reach out on our Discord. We're happy to help you find something to work on!
## Use of generative AI
We generally discourage PRs entirely generated by an LLM. For any part generated by an LLM, please put extra effort into your self-review. By using generative AI without proper self-review, the time you save ends up being more work we need to put in for proper reviews and code cleanup. Please keep that in mind when submitting code by an LLM. Clearly state the use of LLMs/(generative) AI in your pull request as requested by the template.
## Feature freezes
From time to time, we put a feature freeze on parts of the codebase. For us, this means we won't accept most PRs that make changes in that area. Exempted from this are simple bug fixes that require only minor changes. We will close feature PRs that target a feature-frozen area, even if that feature is highly requested and you put a lot of work into it. Please keep that in mind, and if you're ever uncertain if a PR would be accepted, reach out to us first (e.g., in the aforementioned `#contributing` channel). We hate to throw away work. Currently, we have feature freezes on:
* Sharing/Asset ownership
* (External) libraries
## Non-code contributions
If you want to contribute to Immich but you don't feel comfortable programming in our tech stack, there are other ways you can help the team. All our translations are done through [Weblate](https://hosted.weblate.org/projects/immich). These rely entirely on the community; if you speak a language that isn't fully translated yet, submitting translations there is greatly appreciated! If you like helping others, answering Q&A discussions here on GitHub and replying to people on our Discord is also always appreciated.


@@ -17,9 +17,6 @@ dev-docs:
e2e:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --remove-orphans
e2e-dev:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.dev.yml up --remove-orphans
e2e-update:
@trap 'make e2e-down' EXIT; COMPOSE_BAKE=true docker compose -f ./e2e/docker-compose.yml up --build -V --remove-orphans


@@ -118,16 +118,16 @@ Read more about translations [here](https://docs.immich.app/developer/translatio
## Star history
<a href="https://star-history.com/#immich-app/immich&type=date&legend=top-left">
<a href="https://star-history.com/#immich-app/immich&Date">
<picture>
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=date" width="100%" />
<source media="(prefers-color-scheme: dark)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date&theme=dark" />
<source media="(prefers-color-scheme: light)" srcset="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" />
<img alt="Star History Chart" src="https://api.star-history.com/svg?repos=immich-app/immich&type=Date" width="100%" />
</picture>
</a>
## Contributors
<a href="https://github.com/immich-app/immich/graphs/contributors">
<a href="https://github.com/alextran1502/immich/graphs/contributors">
<img src="https://contrib.rocks/image?repo=immich-app/immich" width="100%"/>
</a>


@@ -1 +1 @@
24.13.0
22.20.0


@@ -1,4 +1,4 @@
FROM node:24.1.0-alpine3.20@sha256:8fe019e0d57dbdce5f5c27c0b63d2775cf34b00e3755a7dea969802d7e0c2b25 AS core
FROM node:22.16.0-alpine3.20@sha256:2289fb1fba0f4633b08ec47b94a89c7e20b829fc5679f9b7b298eaa2f1ed8b7e AS core
WORKDIR /usr/src/app
COPY package* pnpm* .pnpmfile.cjs ./


@@ -1,29 +0,0 @@
[tasks.install]
run = "pnpm install --filter @immich/cli --frozen-lockfile"
[tasks.build]
env._.path = "./node_modules/.bin"
run = "vite build"
[tasks.test]
env._.path = "./node_modules/.bin"
run = "vite"
[tasks.lint]
env._.path = "./node_modules/.bin"
run = "eslint \"src/**/*.ts\" --max-warnings 0"
[tasks."lint-fix"]
run = { task = "lint --fix" }
[tasks.format]
env._.path = "./node_modules/.bin"
run = "prettier --check ."
[tasks."format-fix"]
env._.path = "./node_modules/.bin"
run = "prettier --write ."
[tasks.check]
env._.path = "./node_modules/.bin"
run = "tsc --noEmit"


@@ -1,6 +1,6 @@
{
"name": "@immich/cli",
"version": "2.2.105",
"version": "2.2.97",
"description": "Command Line Interface (CLI) for Immich",
"type": "module",
"exports": "./dist/index.js",
@@ -20,7 +20,7 @@
"@types/lodash-es": "^4.17.12",
"@types/micromatch": "^4.0.9",
"@types/mock-fs": "^4.13.1",
"@types/node": "^24.10.8",
"@types/node": "^22.18.10",
"@vitest/coverage-v8": "^3.0.0",
"byte-size": "^9.0.0",
"cli-progress": "^3.12.0",
@@ -28,15 +28,15 @@
"eslint": "^9.14.0",
"eslint-config-prettier": "^10.1.8",
"eslint-plugin-prettier": "^5.1.3",
"eslint-plugin-unicorn": "^62.0.0",
"eslint-plugin-unicorn": "^60.0.0",
"globals": "^16.0.0",
"mock-fs": "^5.2.0",
"prettier": "^3.7.4",
"prettier": "^3.2.5",
"prettier-plugin-organize-imports": "^4.0.0",
"typescript": "^5.3.3",
"typescript-eslint": "^8.28.0",
"vite": "^7.0.0",
"vite-tsconfig-paths": "^6.0.0",
"vite-tsconfig-paths": "^5.0.0",
"vitest": "^3.0.0",
"vitest-fetch-mock": "^0.4.0",
"yaml": "^2.3.1"
@@ -69,6 +69,6 @@
"micromatch": "^4.0.8"
},
"volta": {
"node": "24.13.0"
"node": "22.20.0"
}
}


@@ -271,7 +271,7 @@ describe('startWatch', () => {
});
});
it('should filter out ignored patterns', async () => {
it('should filger out ignored patterns', async () => {
const testFilePath = path.join(testFolder, 'test.jpg');
const ignoredPattern = 'ignored';
const ignoredFolder = path.join(testFolder, ignoredPattern);


@@ -37,7 +37,6 @@ export interface UploadOptionsDto {
dryRun?: boolean;
skipHash?: boolean;
delete?: boolean;
deleteDuplicates?: boolean;
album?: boolean;
albumName?: string;
includeHidden?: boolean;
@@ -71,8 +70,10 @@ const uploadBatch = async (files: string[], options: UploadOptionsDto) => {
console.log(JSON.stringify({ newFiles, duplicates, newAssets }, undefined, 4));
}
await updateAlbums([...newAssets, ...duplicates], options);
await deleteFiles(newAssets, duplicates, options);
await deleteFiles(
newAssets.map(({ filepath }) => filepath),
options,
);
};
export const startWatch = async (
@@ -405,46 +406,28 @@ const uploadFile = async (input: string, stats: Stats): Promise<AssetMediaRespon
return response.json();
};
const deleteFiles = async (uploaded: Asset[], duplicates: Asset[], options: UploadOptionsDto): Promise<void> => {
let fileCount = 0;
if (options.delete) {
fileCount += uploaded.length;
}
if (options.deleteDuplicates) {
fileCount += duplicates.length;
}
if (options.dryRun) {
console.log(`Would have deleted ${fileCount} local asset${s(fileCount)}`);
const deleteFiles = async (files: string[], options: UploadOptionsDto): Promise<void> => {
if (!options.delete) {
return;
}
if (fileCount === 0) {
if (options.dryRun) {
console.log(`Would have deleted ${files.length} local asset${s(files.length)}`);
return;
}
console.log('Deleting assets that have been uploaded...');
const deletionProgress = new SingleBar(
{ format: 'Deleting local assets | {bar} | {percentage}% | ETA: {eta}s | {value}/{total} assets' },
Presets.shades_classic,
);
deletionProgress.start(fileCount, 0);
const chunkDelete = async (files: Asset[]) => {
for (const assetBatch of chunk(files, options.concurrency)) {
await Promise.all(assetBatch.map((input: Asset) => unlink(input.filepath)));
deletionProgress.update(assetBatch.length);
}
};
deletionProgress.start(files.length, 0);
try {
if (options.delete) {
await chunkDelete(uploaded);
}
if (options.deleteDuplicates) {
await chunkDelete(duplicates);
for (const assetBatch of chunk(files, options.concurrency)) {
await Promise.all(assetBatch.map((input: string) => unlink(input)));
deletionProgress.update(assetBatch.length);
}
} finally {
deletionProgress.stop();


@@ -8,7 +8,6 @@ import { serverInfo } from 'src/commands/server-info';
import { version } from '../package.json';
const defaultConfigDirectory = path.join(os.homedir(), '.config/immich/');
const defaultConcurrency = Math.max(1, os.cpus().length - 1);
const program = new Command()
.name('immich')
@@ -67,7 +66,7 @@ program
.addOption(
new Option('-c, --concurrency <number>', 'Number of assets to upload at the same time')
.env('IMMICH_UPLOAD_CONCURRENCY')
.default(defaultConcurrency),
.default(4),
)
.addOption(
new Option('-j, --json-output', 'Output detailed information in json format')
@@ -75,11 +74,6 @@ program
.default(false),
)
.addOption(new Option('--delete', 'Delete local assets after upload').env('IMMICH_DELETE_ASSETS'))
.addOption(
new Option('--delete-duplicates', 'Delete local assets that are duplicates (already exist on server)').env(
'IMMICH_DELETE_DUPLICATES',
),
)
.addOption(new Option('--no-progress', 'Hide progress bars').env('IMMICH_PROGRESS_BAR').default(true))
.addOption(
new Option('--watch', 'Watch for changes and upload automatically')

View File

@@ -299,7 +299,7 @@ describe('crawl', () => {
.map(([file]) => file);
// Compare file's content instead of path since a file can be represented in multiple ways.
expect(actual.map((path) => readContent(path)).toSorted()).toEqual(expected.toSorted());
expect(actual.map((path) => readContent(path)).sort()).toEqual(expected.sort());
});
}
});
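The only change in this test (and in the matching `crawl` implementation in the next file) is the swap between `toSorted()` and `sort()`. The two are not interchangeable in general: `sort()` sorts the array in place and returns the same reference, while `toSorted()` (an ES2023 addition, likely related to the `target` change in the tsconfig diff further down) returns a new sorted copy. A quick illustration:

```typescript
const files = ['c.jpg', 'a.jpg', 'b.jpg'];

// toSorted() returns a new sorted array; the original order is preserved.
const copy = files.toSorted();
console.log(copy);  // [ 'a.jpg', 'b.jpg', 'c.jpg' ]
console.log(files); // [ 'c.jpg', 'a.jpg', 'b.jpg' ]

// sort() sorts in place and returns the same array reference.
const sorted = files.sort();
console.log(files);            // [ 'a.jpg', 'b.jpg', 'c.jpg' ]
console.log(sorted === files); // true
```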

View File

@@ -160,7 +160,7 @@ export const crawl = async (options: CrawlOptions): Promise<string[]> => {
ignore: [`**/${exclusionPattern}`],
});
globbedFiles.push(...crawledFiles);
return globbedFiles.toSorted();
return globbedFiles.sort();
};
export const sha1 = (filepath: string) => {

View File

@@ -9,7 +9,7 @@
"experimentalDecorators": true,
"allowSyntheticDefaultImports": true,
"resolveJsonModule": true,
"target": "es2023",
"target": "es2022",
"sourceMap": true,
"outDir": "./dist",
"incremental": true,

View File

@@ -1,20 +0,0 @@
[tools]
terragrunt = "0.93.10"
opentofu = "1.10.7"
[tasks."tg:fmt"]
run = "terragrunt hclfmt"
description = "Format terragrunt files"
[tasks.tf]
run = "terragrunt run --all"
description = "Wrapper for terragrunt run-all"
dir = "{{cwd}}"
[tasks."tf:fmt"]
run = "tofu fmt -recursive tf/"
description = "Format terraform files"
[tasks."tf:init"]
run = { task = "tf init -- -reconfigure" }
dir = "{{cwd}}"

View File

@@ -41,7 +41,6 @@ services:
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
- ../plugins:/build/corePlugin
env_file:
- .env
environment:
@@ -58,6 +57,10 @@ services:
IMMICH_THIRD_PARTY_BUG_FEATURE_URL: https://github.com/immich-app/immich/issues
IMMICH_THIRD_PARTY_DOCUMENTATION_URL: https://docs.immich.app
IMMICH_THIRD_PARTY_SUPPORT_URL: https://docs.immich.app/community-guides
ulimits:
nofile:
soft: 1048576
hard: 1048576
ports:
- 9230:9230
- 9231:9231
@@ -96,6 +99,10 @@ services:
- app-node_modules:/usr/src/app/node_modules
- sveltekit:/usr/src/app/web/.svelte-kit
- coverage:/usr/src/app/web/coverage
ulimits:
nofile:
soft: 1048576
hard: 1048576
restart: unless-stopped
depends_on:
immich-server:
@@ -115,7 +122,7 @@ services:
ports:
- 3003:3003
volumes:
- ../machine-learning/immich_ml:/usr/src/immich_ml
- ../machine-learning:/usr/src/app
- model-cache:/cache
env_file:
- .env
@@ -127,7 +134,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:9@sha256:546304417feac0874c3dd576e0952c6bb8f06bb4093ea0c9ca303c73cf458f63
image: docker.io/valkey/valkey:8@sha256:81db6d39e1bba3b3ff32bd3a1b19a6d69690f94a3954ec131277b9a26b95b3aa
healthcheck:
test: redis-cli ping || exit 1
@@ -146,8 +153,6 @@ services:
ports:
- 5432:5432
shm_size: 128mb
healthcheck:
disable: false
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
# immich-prometheus:
# container_name: immich_prometheus

View File

@@ -56,7 +56,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:9@sha256:546304417feac0874c3dd576e0952c6bb8f06bb4093ea0c9ca303c73cf458f63
image: docker.io/valkey/valkey:8@sha256:81db6d39e1bba3b3ff32bd3a1b19a6d69690f94a3954ec131277b9a26b95b3aa
healthcheck:
test: redis-cli ping || exit 1
restart: always
@@ -77,15 +77,13 @@ services:
- 5432:5432
shm_size: 128mb
restart: always
healthcheck:
disable: false
# set IMMICH_TELEMETRY_INCLUDE=all in .env to enable metrics
immich-prometheus:
container_name: immich_prometheus
ports:
- 9090:9090
image: prom/prometheus@sha256:1f0f50f06acaceb0f5670d2c8a658a599affe7b0d8e78b898c1035653849a702
image: prom/prometheus@sha256:63805ebb8d2b3920190daf1cb14a60871b16fd38bed42b857a3182bc621f4996
volumes:
- ./prometheus.yml:/etc/prometheus/prometheus.yml
- prometheus-data:/prometheus
@@ -97,7 +95,7 @@ services:
command: ['./run.sh', '-disable-reporting']
ports:
- 3000:3000
image: grafana/grafana:12.3.1-ubuntu@sha256:d57f1365197aec34c4d80869d8ca45bb7787c7663904950dab214dfb40c1c2fd
image: grafana/grafana:12.1.1-ubuntu@sha256:d1da838234ff2de93e0065ee1bf0e66d38f948dcc5d718c25fa6237e14b4424a
volumes:
- grafana-data:/var/lib/grafana

View File

@@ -49,7 +49,7 @@ services:
redis:
container_name: immich_redis
image: docker.io/valkey/valkey:9@sha256:546304417feac0874c3dd576e0952c6bb8f06bb4093ea0c9ca303c73cf458f63
image: docker.io/valkey/valkey:8@sha256:81db6d39e1bba3b3ff32bd3a1b19a6d69690f94a3954ec131277b9a26b95b3aa
healthcheck:
test: redis-cli ping || exit 1
restart: always
@@ -69,8 +69,6 @@ services:
- ${DB_DATA_LOCATION}:/var/lib/postgresql/data
shm_size: 128mb
restart: always
healthcheck:
disable: false
volumes:
model-cache:

View File

@@ -9,8 +9,8 @@ DB_DATA_LOCATION=./postgres
# To set a timezone, uncomment the next line and change Etc/UTC to a TZ identifier from this list: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones#List
# TZ=Etc/UTC
# The Immich version to use. You can pin this to a specific version like "v2.1.0"
IMMICH_VERSION=v2
# The Immich version to use. You can pin this to a specific version like "v1.71.0"
IMMICH_VERSION=release
# Connection secret for postgres. You should change it to a random password
# Please use only the characters `A-Za-z0-9`, without special characters or spaces

View File

@@ -1 +1 @@
24.13.0
22.20.0

View File

@@ -22,7 +22,7 @@ For organizations seeking to resell Immich, we have established the following gu
- Do not misrepresent your reseller site or services as being officially affiliated with or endorsed by Immich or our development team.
- For small resellers who wish to contribute financially to Immich's development, we recommend directing your customers to purchase product keys directly from us rather than attempting to broker revenue-sharing arrangements. We ask that you refrain from misrepresenting reseller activities as directly supporting our development work.
- For small resellers who wish to contribute financially to Immich's development, we recommend directing your customers to purchase licenses directly from us rather than attempting to broker revenue-sharing arrangements. We ask that you refrain from misrepresenting reseller activities as directly supporting our development work.
When in doubt or if you have an edge case scenario, we encourage you to contact us directly via email to discuss the use of our trademark. We can provide clear guidance on what is acceptable and what is not. You can reach out at: questions@immich.app
@@ -133,9 +133,9 @@ There are a few different scenarios that can lead to this situation. The solutio
The job is only automatically run once per asset after upload. If metadata extraction originally failed, the jobs were cleared/canceled, etc.,
the job may not have run automatically the first time.
### How can I hide a photo or video from the timeline?
### How can I hide photos from the timeline?
You can _archive_ them. This will hide the asset from the main timeline and folder view, but it will still show up in searches. All archived assets can be found in the _Archive_ view
You can _archive_ them.
### How can I backup data from Immich?

View File

@@ -5,7 +5,9 @@ import TabItem from '@theme/TabItem';
A [3-2-1 backup strategy](https://www.backblaze.com/blog/the-3-2-1-backup-strategy/) is recommended to protect your data. You should keep copies of your uploaded photos/videos as well as the Immich database for a comprehensive backup solution. This page provides an overview of how to back up the database and the location of user-uploaded pictures and videos. A template bash script that can be run as a cron job is provided [here](/guides/template-backup-script.md).
The instructions on this page show you how to prepare your Immich instance to be backed up, and which files to take a backup of. _You still need to take care of using an actual backup tool to make a backup yourself._
:::danger
The instructions on this page show you how to prepare your Immich instance to be backed up, and which files to take a backup of. You still need to take care of using an actual backup tool to make a backup yourself.
:::
## Database
@@ -39,7 +41,7 @@ By default, Immich will keep the last 14 database dumps and create a new dump ev
#### Trigger Dump
You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/queues).
You are able to trigger a database dump in the [admin job status page](http://my.immich.app/admin/jobs-status).
Visit the page, open the "Create job" modal from the top right, select "Create Database Dump" and click "Confirm".
A job will run and trigger a dump; you can verify this worked correctly by checking the logs or the `backups/` folder.
These dumps will count towards the last `X` dumps that will be kept based on your settings.

View File

@@ -1,18 +0,0 @@
# Maintenance Mode
Maintenance mode is used to perform administrative tasks such as restoring backups to Immich.
You can enter maintenance mode by either:
- Selecting "enable maintenance mode" in system settings in administration.
- Running the enable maintenance mode [administration command](./server-commands.md).
## Logging in during maintenance
Maintenance mode uses a separate login system which is handled automatically behind the scenes in most cases. Enabling maintenance mode in settings will automatically log you into maintenance mode when the server comes back up.
If you find that you've been logged out, you can:
- Open the logs for the Immich server and look for _"🚧 Immich is in maintenance mode, you can log in using the following URL:"_
- Run the enable maintenance mode [administration command](./server-commands.md) again, this will give you a new URL to login with.
- Run the disable maintenance mode [administration command](./server-commands.md) then re-enter through system settings.

View File

@@ -10,19 +10,16 @@ Running with a pre-existing Postgres server can unlock powerful administrative f
## Prerequisites
You must install pgvector as it is a prerequisite for VectorChord.
You must install `pgvector` (`>= 0.7.0, < 1.0.0`), as it is a prerequisite for `vchord`.
The easiest way to do this on Debian/Ubuntu is by adding the [PostgreSQL Apt repository][pg-apt] and then
running `apt install postgresql-NN-pgvector`, where `NN` is your Postgres version (e.g., `16`).
You must install VectorChord into your instance of Postgres using their [instructions][vchord-install]. After installation, add `shared_preload_libraries = 'vchord.so'` to your `postgresql.conf`. If you already have some `shared_preload_libraries` set, you can separate each extension with a comma. For example, `shared_preload_libraries = 'pg_stat_statements, vchord.so'`.
:::note Supported versions
Immich is known to work with Postgres versions `>= 14, < 19`.
:::note
Immich is known to work with Postgres versions `>= 14, < 18`.
VectorChord is known to work with pgvector versions `>= 0.7, < 0.9`.
The Immich server will check the VectorChord version on startup to ensure compatibility, and refuse to start if a compatible version is not found.
The current accepted range for VectorChord is `>= 0.3, < 2.0`.
Make sure the installed version of VectorChord is compatible with your version of Immich. The current accepted range for VectorChord is `>= 0.3.0, < 0.5.0`.
:::
## Specifying the connection URL

View File

@@ -21,12 +21,6 @@ server {
# allow large file uploads
client_max_body_size 50000M;
# disable buffering uploads to prevent OOM on reverse proxy server and make uploads twice as fast (no pause)
proxy_request_buffering off;
# increase body buffer to avoid limiting upload speed
client_body_buffer_size 1024k;
# Set headers
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
@@ -35,6 +29,8 @@ server {
# enable websockets: http://nginx.org/en/docs/http/websocket.html
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_redirect off;
# set timeout
@@ -44,8 +40,6 @@ server {
location / {
proxy_pass http://<backend_url>:2283;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
}
# useful when using Let's Encrypt http-01 challenge

View File

@@ -2,19 +2,17 @@
The `immich-server` docker image comes preinstalled with an administrative CLI (`immich-admin`) that supports the following commands:
| Command | Description |
| -------------------------- | ------------------------------------------------------------- |
| `help` | Display help |
| `reset-admin-password` | Reset the password for the admin user |
| `disable-password-login` | Disable password login |
| `enable-password-login` | Enable password login |
| `disable-maintenance-mode` | Disable maintenance mode |
| `enable-maintenance-mode` | Enable maintenance mode |
| `enable-oauth-login` | Enable OAuth login |
| `disable-oauth-login` | Disable OAuth login |
| `list-users` | List Immich users |
| `version` | Print Immich version |
| `change-media-location` | Change database file paths to align with a new media location |
| Command | Description |
| ------------------------ | ------------------------------------------------------------- |
| `help` | Display help |
| `reset-admin-password` | Reset the password for the admin user |
| `disable-password-login` | Disable password login |
| `enable-password-login` | Enable password login |
| `enable-oauth-login` | Enable OAuth login |
| `disable-oauth-login` | Disable OAuth login |
| `list-users` | List Immich users |
| `version` | Print Immich version |
| `change-media-location` | Change database file paths to align with a new media location |
## How to run a command
@@ -49,23 +47,6 @@ immich-admin enable-password-login
Password login has been enabled.
```
Disable Maintenance Mode
```
immich-admin disable-maintenance-mode
Maintenance mode has been disabled.
```
Enable Maintenance Mode
```
immich-admin enable-maintenance-mode
Maintenance mode has been enabled.
Log in using the following URL:
https://my.immich.app/maintenance?token=<token>
```
Enable OAuth login
```

View File

@@ -12,13 +12,3 @@ pnpm run migrations:generate <migration-name>
3. Move the migration file to the folder `./server/src/schema/migrations` in your code editor.
The server will automatically detect `*.ts` file changes and restart. Part of the server start-up process includes running any new migrations, so it will be applied immediately.
## Reverting a Migration
If you need to undo the most recently applied migration, for example when developing or testing schema changes, run:
```bash
pnpm run migrations:revert
```
This command rolls back the latest migration and brings the database schema back to its previous state.

View File

@@ -256,7 +256,7 @@ The Dev Container supports multiple ways to run tests:
```bash
# Run tests for specific components
make test-server # Server unit tests
make test-server # Server unit tests
make test-web # Web unit tests
make test-e2e # End-to-end tests
make test-cli # CLI tests
@@ -268,13 +268,12 @@ make test-all # Runs tests for all components
make test-medium-dev # End-to-end tests
```
#### Using PNPM Directly
#### Using NPM Directly
```bash
# Server tests
cd /workspaces/immich/server
pnpm test # Run all tests
pnpm run test:medium # Medium tests (integration tests)
pnpm test # Run all tests
pnpm run test:watch # Watch mode
pnpm run test:cov # Coverage report
@@ -294,21 +293,21 @@ pnpm run test:web # Run web UI tests
```bash
# Linting
make lint-server # Lint server code
make lint-web # Lint web code
make lint-all # Lint all components
make lint-web # Lint web code
make lint-all # Lint all components
# Formatting
make format-server # Format server code
make format-web # Format web code
make format-all # Format all code
make format-web # Format web code
make format-all # Format all code
# Type checking
make check-server # Type check server
make check-web # Type check web
make check-all # Check all components
make check-web # Type check web
make check-all # Check all components
# Complete hygiene check
make hygiene-all # Run lint, format, check, SQL sync, and audit
make hygiene-all # Runs lint, format, check, SQL sync, and audit
```
### Additional Make Commands
@@ -316,21 +315,21 @@ make hygiene-all # Run lint, format, check, SQL sync, and audit
```bash
# Build commands
make build-server # Build server
make build-web # Build web app
make build-all # Build everything
make build-web # Build web app
make build-all # Build everything
# API generation
make open-api # Generate OpenAPI specs
make open-api # Generate OpenAPI specs
make open-api-typescript # Generate TypeScript SDK
make open-api-dart # Generate Dart SDK
make open-api-dart # Generate Dart SDK
# Database
make sql # Sync database schema
make sql # Sync database schema
# Dependencies
make install-server # Install server dependencies
make install-web # Install web dependencies
make install-all # Install all dependencies
make install-server # Install server dependencies
make install-web # Install web dependencies
make install-all # Install all dependencies
```
### Debugging

View File

@@ -14,15 +14,15 @@ When contributing code through a pull request, please check the following:
- [ ] `pnpm run check:typescript` (check typescript)
- [ ] `pnpm test` (unit tests)
:::tip AIO
Run all web checks with `pnpm run check:all`
:::
## Documentation
- [ ] `pnpm run format` (formatting via Prettier)
- [ ] Update the `_redirects` file if you have renamed a page or removed it from the documentation.
:::tip AIO
Run all web checks with `pnpm run check:all`
:::
## Server Checks
- [ ] `pnpm run lint` (linting via ESLint)

View File

@@ -4,12 +4,8 @@ sidebar_position: 2
# Setup
:::warning
Make sure to read the [`CONTRIBUTING.md`](https://github.com/immich-app/immich/blob/main/CONTRIBUTING.md) before you dive into the code.
:::
:::note
If there's a feature you're planning to work on, just give us a heads up in [#contributing](https://discord.com/channels/979116623879368755/1071165397228855327) on [our Discord](https://discord.immich.app) so we can:
If there's a feature you're planning to work on, just give us a heads up in [Discord](https://discord.com/channels/979116623879368755/1071165397228855327) so we can:
1. Let you know if it's something we would accept into Immich
2. Provide any guidance on how something like that would ideally be implemented
@@ -52,6 +48,7 @@ You can access the web from `http://your-machine-ip:3000` or `http://localhost:3
**Notes:**
- The "web" development container runs with uid 1000. If that uid does not have read/write permissions on the mounted volumes, you may encounter errors.
- In case of a rootless Docker setup, you need to use root within the container; otherwise you will encounter read/write permission errors. See the comments in `docker/docker-compose.dev.yml`.
#### Connect web to a remote backend

View File

@@ -18,7 +18,6 @@ make e2e
Before you can run the tests, you need to run the following commands _once_:
- `pnpm install` (in `e2e/`)
- `pnpm run build` (in `cli/`)
- `make open-api` (in the project root `/`)
Once the test environment is running, the e2e tests can be run via:

View File

@@ -1,42 +1,37 @@
# Automatic Backup
## Overview
Immich supports uploading photos and videos from your mobile device to the server automatically.
By enabling the backup button, Immich will upload new photos and videos from selected albums when you open or resume the app, as well as periodically in the background (iOS), or when new photos or videos are taken (Android).
---
<img
src={require('./img/enable-backup-button.webp').default}
width="300px"
title="Upload button"
/>
You can enable these settings by accessing the upload options from the upload page.
## Platform Specific Features
<img src={require('./img/backup-settings-access.webp').default} width="50%" title="Backup option selection" />
### General
<img src={require('./img/background-foreground-backup.webp').default} width="50%" title="Foreground&Background Backup" />
By default, Immich will only upload photos and videos when connected to Wi-Fi. You can change this behavior in the backup settings page.
## Foreground backup
<img
src={require('./img/backup-options.webp').default}
width="500px"
title="Upload button"
/>
If foreground backup is enabled, then whenever the app is opened or resumed, it will check whether any photos or videos in the selected album(s) have yet to be uploaded to the cloud (the remainder count). If there are any, they will be uploaded.
### Android
## Background backup
<img
src={require('./img/android-backup-options.webp').default}
width="500px"
title="Upload button"
/>
This feature is intended for everyday use. For initial bulk uploading, please use the foreground upload feature. For more information on why background upload is not working as expected, please refer to the [FAQ](/FAQ#why-does-foreground-backup-stop-when-i-navigate-away-from-the-app-shouldnt-it-transfer-the-job-to-background-backup).
If background backup is enabled, the app will periodically check if there are any new photos or videos in the selected album(s) to be uploaded to the server. If there are, it will upload them to the cloud in the background.
:::info Note
#### General
- The app must be in the background for the backup worker to start running.
- If you reopen the app and the first page you see is the backup page, the counts will not reflect the results of background uploads. You have to navigate away from the page and come back to see the updated counts.
#### Android
- It is a well-known problem that some Android models are very strict with battery optimization settings, which can cause a problem with the background worker. Please visit [Don't kill my app](https://dontkillmyapp.com/) for a guide on disabling this setting on your phone.
- You can allow the background task to run when the device is charging.
- You can set the minimum delay from the time a photo is taken to when the background upload task will run.
### iOS
#### iOS
- You must enable **Background App Refresh** for the app to work in the background. You can enable it in the Settings app under General > Background App Refresh.
@@ -44,4 +39,4 @@ title="Upload button"
<img src={require('./img/background-app-refresh.webp').default} width="30%" title="background-app-refresh" />
</div>
- iOS automatically manages background tasks; the app cannot control when the background upload task will run. It is known that the more frequently you open the app, the more often the background task will run.
:::

View File

@@ -103,7 +103,6 @@ Options:
-c, --concurrency <number> Number of assets to upload at the same time (default: 4, env: IMMICH_UPLOAD_CONCURRENCY)
-j, --json-output Output detailed information in json format (default: false, env: IMMICH_JSON_OUTPUT)
--delete Delete local assets after upload (env: IMMICH_DELETE_ASSETS)
--delete-duplicates Delete local assets that are duplicates (already exist on server) (env: IMMICH_DELETE_DUPLICATES)
--no-progress Hide progress bars (env: IMMICH_PROGRESS_BAR)
--watch Watch for changes and upload automatically (default: false, env: IMMICH_WATCH_CHANGES)
--help display help for command
@@ -188,8 +187,6 @@ immich upload --dry-run . | tail -n +6 | jq .newFiles[]
### Obtain the API Key
The API key can be obtained in the user setting panel on the web interface. You can also specify permissions for the key to limit its access.
The API key can be obtained in the user setting panel on the web interface.
![Obtain Api Key](./img/obtain-api-key.webp)
![Specify permission for the key](./img/obtain-api-key-2.webp)

View File

@@ -21,14 +21,14 @@ The asset detail view will also show the faces that are recognized in the asset.
Additional actions you can do include:
- Changing the feature photo of the person
- Hiding the faces of a person from the Explore page and detail view
- Setting a person's date of birth, so that the age of the person can be shown at the time the photo was taken
- Setting a person's date of birth
- Merging two or more detected faces into one person
- Favoriting a person to pin them to the top of the list
- Hiding the faces of a person from the Explore page and detail view
- Assigning an unrecognized face to a person
It can be found in the app bar when you access the detail view of a person.
<img src={require('./img/facial-recognition-4.webp').default} title='Facial Recognition 4' />
<img src={require('./img/facial-recognition-4.webp').default} title='Facial Recognition 4' width="70%"/>
## How Face Detection Works

View File

@@ -71,22 +71,6 @@ For RKMPP to work:
5. (Optional) Enable hardware decoding for optimal performance.
<details>
<summary>immich.json</summary>
If you use a [configuration file](/install/config-file.md), use the `accel` option to select the hardware (e.g. `qsv` for Intel or `nvenc` for Nvidia). Set `accelDecode` to `true` if you want hardware decoding.
```json
{
"ffmpeg": {
"accel": "qsv",
"accelDecode": true
}
}
```
</details>
#### Single Compose File
Some platforms, including Unraid and Portainer, do not support multiple Compose files as of writing. As an alternative, you can "inline" the relevant contents of the [`hwaccel.transcoding.yml`][hw-file] file into the `immich-server` service directly.

(Binary image files used by the documentation were added, removed, or replaced in this diff; their contents are not shown.)

View File

@@ -10,13 +10,11 @@ import MobileAppBackup from '/docs/partials/_mobile-app-backup.md';
<MobileAppDownload />
:::info Android verification
Below are the SHA-256 fingerprints for the certificates signing the android applications.
:::info Beta Program
The beta release channel allows users to test upcoming changes before they are officially released. To join the channel, use the links below.
- Playstore / Github releases:
`86:C5:C4:55:DF:AF:49:85:92:3A:8F:35:AD:B3:1D:0C:9E:0B:95:7D:7F:94:C2:D2:AF:6A:24:38:AA:96:00:20`
- F-Droid releases:
`FA:8B:43:95:F4:A6:47:71:A0:53:D1:C7:57:73:5F:A2:30:13:74:F5:3D:58:0D:D1:75:AA:F7:A1:35:72:9C:BF`
- Android: Invitation link from [web](https://play.google.com/store/apps/details?id=app.alextran.immich) or from [mobile](https://play.google.com/store/apps/details?id=app.alextran.immich)
- iOS: [TestFlight invitation link](https://testflight.apple.com/join/1vYsAa8P)
:::
@@ -28,6 +26,10 @@ Below are the SHA-256 fingerprints for the certificates signing the android appl
<MobileAppBackup />
:::info
You can enable automatic backup on supported devices. For more information see [Automatic Backup](/features/automatic-backup.md).
:::
## Sync only selected photos
If you have a large number of photos on the device and would prefer not to back up all of them, it might be prudent to only back up selected photos from the device to the Immich server.
@@ -45,45 +47,17 @@ This will enable a small cloud icon on the bottom right corner of the asset tile
Now make sure that the local album is selected in the backup screen (steps 1-2 above). You can find these albums listed in **<ins>Library -> On this device</ins>**. To selectively upload photos from these albums, simply select the local-only photos and tap the "Upload" button in the dynamic bottom menu.
<img
src={require('./img/mobile-upload-open-photo.webp').default}
width="50%"
title="Upload button on local asset preview"
/>
<img
src={require('./img/mobile-upload-selected-photos.webp').default}
width="40%"
title="Upload button after photos selection"
/>
## Free Up Space
The **Free Up Space** tool allows you to remove local media files from your device that have already been successfully backed up to your Immich server (and are not in the Immich trash). This helps reclaim storage on your mobile device without losing your memories.
### How it works
<img src={require('./img/free-up-space.webp').default} title="Free up space" />
1. **Configuration:**
- **Cutoff Date:** You can select a cutoff date. The tool will only look for photos and videos **on or before** this date.
- **Filter Options:** You can choose to remove **All** assets, or restrict removal to **Photos only** or **Videos only**.
- **Keep Favorites:** By default, local assets marked as favorites are preserved on your device, even if they match the cutoff date.
2. **Scan & Review:** Before any files are removed, you are presented with a review screen to verify which items will be deleted.
3. **Deletion:** Confirmed items are moved to your device's native Trash/Recycle Bin.
:::info reclaim storage
To permanently free up space, you must manually empty the system/gallery trash.
:::
### iCloud Photos
If you use **iCloud Photos** alongside Immich, it is vital to understand how deletion affects your data. iCloud utilizes a two-way sync; this means deleting a photo from your iPhone to free up space will **also delete it from iCloud**.
Assets that are part of an **iCloud Shared Album** are automatically excluded from the cleanup scan because iCloud does not allow removing the items from the device.
### External App Dependencies (WhatsApp, etc.)
Android applications like **WhatsApp** rely on local files to display media in chat history.
If Immich backs up your WhatsApp folder and you run **Free Up Space**, the local copies of these images will be deleted. Consequently, **media in your WhatsApp chats will appear blurry or missing.** You will only be able to view these photos inside the Immich app; they will no longer be visible within the WhatsApp interface.
**Recommendation:** If keeping chat history intact is important, please ensure you review the deletion list carefully or consider excluding WhatsApp folders from the backup if you intend to use this feature frequently.
## Album Sync
You can sync or mirror an album from your phone to the Immich server on your account. For example, if you select the Recents, Camera, and Videos albums for backup, corresponding albums with the same names will be created on the server. Once the assets from those albums are uploaded, they will be put into the target albums automatically.
@@ -111,3 +85,11 @@ Enter the cloud on the top right -> cog wheel on the top right -> select the syn
If you delete/move photos in the local album on your device, it will not be reflected in the album on the server **even if** you click Sync albums
It will only reflect files you add.
:::
If the same asset is in more than one album, it will only sync to the first album it's in; after that it won't sync again, even if the user clicks Sync albums manually.
To overcome this limitation, the files must be removed from the ignore list via:
App settings -> Advanced -> Duplicate Assets -> Clear
:::info
Clearing duplicate assets from the list will cause all previously uploaded duplicate files to be re-uploaded. The files will not actually be uploaded; they will be rejected on the server side (due to duplication), but they will be synchronized to the album and added to the ignore list again at the end of the synchronization.
:::

View File

@@ -112,40 +112,4 @@ You can then make a new panel, specifying Prometheus as the data source for it.
-- TODO: add images and more details here
## Structured Logging
In addition to Prometheus metrics, Immich supports structured JSON logging which is ideal for log aggregation systems like Grafana Loki, ELK Stack, Datadog, Splunk, and others.
### Configuration
By default, Immich outputs human-readable console logs. To enable JSON logging, set the `IMMICH_LOG_FORMAT` environment variable:
```bash
IMMICH_LOG_FORMAT=json
```
:::tip
The default is `IMMICH_LOG_FORMAT=console` for human-readable logs with colors during development. For production deployments using log aggregation, use `IMMICH_LOG_FORMAT=json`.
:::
### JSON Log Format
When enabled, logs are output in structured JSON format:
```json
{"level":"log","pid":36,"timestamp":1766533331507,"message":"Initialized websocket server","context":"WebsocketRepository"}
{"level":"warn","pid":48,"timestamp":1766533331629,"message":"Unable to open /build/www/index.html, skipping SSR.","context":"ApiService"}
{"level":"error","pid":36,"timestamp":1766533331690,"message":"Failed to load plugin immich-core:","context":"Error"}
```
This format includes:
- `level`: Log level (log, warn, error, etc.)
- `pid`: Process ID
- `timestamp`: Unix timestamp in milliseconds
- `message`: Log message
- `context`: Service or component that generated the log
For more information on log formats, see [`IMMICH_LOG_FORMAT`](/install/environment-variables.md#general).
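Because each line is a self-contained JSON object, the output is easy to post-process even without a full aggregation stack. A small hypothetical sketch (the `immich.log` filename and the `LogLine` shape are just illustrations of the fields listed above) that keeps only warnings and errors:

```typescript
import { readFileSync } from 'node:fs';

// Shape mirroring the documented JSON log fields.
interface LogLine {
  level: string;
  pid: number;
  timestamp: number;
  message: string;
  context?: string;
}

// Read a captured log file and keep only warnings and errors.
const entries = readFileSync('immich.log', 'utf8')
  .split('\n')
  .filter((line) => line.trim().length > 0)
  .map((line) => JSON.parse(line) as LogLine)
  .filter(({ level }) => level === 'warn' || level === 'error');

for (const { timestamp, level, context, message } of entries) {
  console.log(`${new Date(timestamp).toISOString()} [${level}] ${context ?? '-'}: ${message}`);
}
```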
[prom-file]: https://github.com/immich-app/immich/releases/latest/download/prometheus.yml

View File

@@ -1222,4 +1222,4 @@ Feel free to make a feature request if there's a model you want to use that we d
[huggingface-clip]: https://huggingface.co/collections/immich-app/clip-654eaefb077425890874cd07
[huggingface-multilingual-clip]: https://huggingface.co/collections/immich-app/multilingual-clip-654eb08c2382f591eeb8c2a7
[smart-search-settings]: https://my.immich.app/admin/system-settings?isOpen=machine-learning+smart-search
[job-status-page]: https://my.immich.app/admin/queues
[job-status-page]: https://my.immich.app/admin/jobs-status

View File

@@ -33,7 +33,7 @@ You can create a public link to share a group of photos or videos, or an album,
The public shared link is generated with a random URL, which acts as a secret to prevent the link from being guessed by unwanted parties.
```
https://my.immich.app/share/JUckRMxlgpo7F9BpyqGk_cZEwDzaU_U5LU5_oNZp1ETIBa9dpQ0b5ghNm_22QVJfn3k
https://immich.yourdomain.com/share/JUckRMxlgpo7F9BpyqGk_cZEwDzaU_U5LU5_oNZp1ETIBa9dpQ0b5ghNm_22QVJfn3k
```
### Creating a public share link

View File

@@ -106,14 +106,14 @@ SELECT "user"."email", "asset"."type", COUNT(*) FROM "asset"
```sql title="Count by tag"
SELECT "t"."value" AS "tag_name", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value" ORDER BY "number_assets" DESC;
```
```sql title="Count by tag (per user)"
SELECT "t"."value" AS "tag_name", "u"."email" as "user_email", COUNT(*) AS "number_assets" FROM "tag" "t"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagId" JOIN "asset" "a" ON "ta"."assetId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
JOIN "tag_asset" "ta" ON "t"."id" = "ta"."tagsId" JOIN "asset" "a" ON "ta"."assetsId" = "a"."id" JOIN "user" "u" ON "a"."ownerId" = "u"."id"
WHERE "a"."visibility" != 'hidden'
GROUP BY "t"."value", "u"."email" ORDER BY "number_assets" DESC;
```

Binary file not shown.

Before

Width:  |  Height:  |  Size: 21 KiB

After

Width:  |  Height:  |  Size: 7.6 KiB

View File

@@ -53,7 +53,7 @@ Version mismatches between both hosts may cause bugs and instability, so remembe
Adding a new URL to the settings is recommended over replacing the existing URL. This is because it will allow machine learning tasks to be processed successfully when the remote server is down by falling back to the local machine learning container. If you do not want machine learning tasks to be processed locally when the remote server is not available, you can instead replace the existing URL and only provide the remote container's URL. If doing this, you can remove the `immich-machine-learning` section of the local `docker-compose.yml` file to save resources, as this service will never be used.
Do note that this will mean that Smart Search and Face Detection jobs will fail to be processed when the remote instance is not available. This in turn means that tasks dependent on these features—Duplicate Detection and Facial Recognition—will not run for affected assets. If this occurs, you must manually click the _Missing_ button next to Smart Search and Face Detection in the [Job Status](http://my.immich.app/admin/queues) page for the jobs to be retried.
Do note that this will mean that Smart Search and Face Detection jobs will fail to be processed when the remote instance is not available. This in turn means that tasks dependent on these features—Duplicate Detection and Facial Recognition—will not run for affected assets. If this occurs, you must manually click the _Missing_ button next to Smart Search and Face Detection in the [Job Status](http://my.immich.app/admin/jobs-status) page for the jobs to be retried.
## Load balancing

View File

@@ -16,76 +16,48 @@ The default configuration looks like this:
```json
{
"ffmpeg": {
"crf": 23,
"threads": 0,
"preset": "ultrafast",
"targetVideoCodec": "h264",
"acceptedVideoCodecs": ["h264"],
"targetAudioCodec": "aac",
"acceptedAudioCodecs": ["aac", "mp3", "libopus", "pcm_s16le"],
"acceptedContainers": ["mov", "ogg", "webm"],
"targetResolution": "720",
"maxBitrate": "0",
"bframes": -1,
"refs": 0,
"gopSize": 0,
"temporalAQ": false,
"cqMode": "auto",
"twoPass": false,
"preferredHwDevice": "auto",
"transcode": "required",
"tonemap": "hable",
"accel": "disabled",
"accelDecode": false
},
"backup": {
"database": {
"cronExpression": "0 02 * * *",
"enabled": true,
"cronExpression": "0 02 * * *",
"keepLastAmount": 14
}
},
"ffmpeg": {
"accel": "disabled",
"accelDecode": false,
"acceptedAudioCodecs": ["aac", "mp3", "libopus"],
"acceptedContainers": ["mov", "ogg", "webm"],
"acceptedVideoCodecs": ["h264"],
"bframes": -1,
"cqMode": "auto",
"crf": 23,
"gopSize": 0,
"maxBitrate": "0",
"preferredHwDevice": "auto",
"preset": "ultrafast",
"refs": 0,
"targetAudioCodec": "aac",
"targetResolution": "720",
"targetVideoCodec": "h264",
"temporalAQ": false,
"threads": 0,
"tonemap": "hable",
"transcode": "required",
"twoPass": false
},
"image": {
"colorspace": "p3",
"extractEmbedded": false,
"fullsize": {
"enabled": false,
"format": "jpeg",
"quality": 80
},
"preview": {
"format": "jpeg",
"quality": 80,
"size": 1440
},
"thumbnail": {
"format": "webp",
"quality": 80,
"size": 250
}
},
"job": {
"backgroundTask": {
"concurrency": 5
},
"faceDetection": {
"smartSearch": {
"concurrency": 2
},
"library": {
"concurrency": 5
},
"metadataExtraction": {
"concurrency": 5
},
"migration": {
"concurrency": 5
},
"notifications": {
"concurrency": 5
},
"ocr": {
"concurrency": 1
"faceDetection": {
"concurrency": 2
},
"search": {
"concurrency": 5
@@ -93,23 +65,20 @@ The default configuration looks like this:
"sidecar": {
"concurrency": 5
},
"smartSearch": {
"concurrency": 2
"library": {
"concurrency": 5
},
"migration": {
"concurrency": 5
},
"thumbnailGeneration": {
"concurrency": 3
},
"videoConversion": {
"concurrency": 1
}
},
"library": {
"scan": {
"cronExpression": "0 0 * * *",
"enabled": true
},
"watch": {
"enabled": false
"notifications": {
"concurrency": 5
}
},
"logging": {
@@ -117,11 +86,8 @@ The default configuration looks like this:
"level": "log"
},
"machineLearning": {
"availabilityChecks": {
"enabled": true,
"interval": 30000,
"timeout": 2000
},
"enabled": true,
"urls": ["http://immich-machine-learning:3003"],
"clip": {
"enabled": true,
"modelName": "ViT-B-32__openai"
@@ -130,59 +96,27 @@ The default configuration looks like this:
"enabled": true,
"maxDistance": 0.01
},
"enabled": true,
"facialRecognition": {
"enabled": true,
"maxDistance": 0.5,
"minFaces": 3,
"modelName": "buffalo_l",
"minScore": 0.7,
"modelName": "buffalo_l"
},
"ocr": {
"enabled": true,
"maxResolution": 736,
"minDetectionScore": 0.5,
"minRecognitionScore": 0.8,
"modelName": "PP-OCRv5_mobile"
},
"urls": ["http://immich-machine-learning:3003"]
"maxDistance": 0.5,
"minFaces": 3
}
},
"map": {
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json",
"enabled": true,
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json"
"lightStyle": "https://tiles.immich.cloud/v1/style/light.json",
"darkStyle": "https://tiles.immich.cloud/v1/style/dark.json"
},
"reverseGeocoding": {
"enabled": true
},
"metadata": {
"faces": {
"import": false
}
},
"newVersionCheck": {
"enabled": true
},
"nightlyTasks": {
"clusterNewFaces": true,
"databaseCleanup": true,
"generateMemories": true,
"missingThumbnails": true,
"startTime": "00:00",
"syncQuotaUsage": true
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"host": "",
"ignoreCert": false,
"password": "",
"port": 587,
"secure": false,
"username": ""
}
}
},
"oauth": {
"autoLaunch": false,
"autoRegister": true,
@@ -194,44 +128,70 @@ The default configuration looks like this:
"issuerUrl": "",
"mobileOverrideEnabled": false,
"mobileRedirectUri": "",
"profileSigningAlgorithm": "none",
"roleClaim": "immich_role",
"scope": "openid email profile",
"signingAlgorithm": "RS256",
"profileSigningAlgorithm": "none",
"storageLabelClaim": "preferred_username",
"storageQuotaClaim": "immich_quota",
"timeout": 30000,
"tokenEndpointAuthMethod": "client_secret_post"
"storageQuotaClaim": "immich_quota"
},
"passwordLogin": {
"enabled": true
},
"reverseGeocoding": {
"enabled": true
},
"server": {
"externalDomain": "",
"loginPageMessage": "",
"publicUsers": true
},
"storageTemplate": {
"enabled": false,
"hashVerificationEnabled": true,
"template": "{{y}}/{{y}}-{{MM}}-{{dd}}/{{filename}}"
},
"templates": {
"email": {
"albumInviteTemplate": "",
"albumUpdateTemplate": "",
"welcomeTemplate": ""
}
"image": {
"thumbnail": {
"format": "webp",
"size": 250,
"quality": 80
},
"preview": {
"format": "jpeg",
"size": 1440,
"quality": 80
},
"colorspace": "p3",
"extractEmbedded": false
},
"newVersionCheck": {
"enabled": true
},
"trash": {
"enabled": true,
"days": 30
},
"theme": {
"customCss": ""
},
"trash": {
"days": 30,
"enabled": true
"library": {
"scan": {
"enabled": true,
"cronExpression": "0 0 * * *"
},
"watch": {
"enabled": false
}
},
"server": {
"externalDomain": "",
"loginPageMessage": ""
},
"notifications": {
"smtp": {
"enabled": false,
"from": "",
"replyTo": "",
"transport": {
"ignoreCert": false,
"host": "",
"port": 587,
"username": "",
"password": ""
}
}
},
"user": {
"deleteDelay": 7

View File

@@ -17,11 +17,11 @@ If this does not work, try running `docker compose up -d --force-recreate`.
## Docker Compose
| Variable | Description | Default | Containers |
| :----------------- | :------------------------------ | :-----: | :----------------------- |
| `IMMICH_VERSION` | Image tags | `v2` | server, machine learning |
| `UPLOAD_LOCATION` | Host path for uploads | | server |
| `DB_DATA_LOCATION` | Host path for Postgres database | | database |
| Variable | Description | Default | Containers |
| :----------------- | :------------------------------ | :-------: | :----------------------- |
| `IMMICH_VERSION` | Image tags | `release` | server, machine learning |
| `UPLOAD_LOCATION` | Host path for uploads | | server |
| `DB_DATA_LOCATION` | Host path for Postgres database | | database |
:::tip
These environment variables are used by the `docker-compose.yml` file and do **NOT** affect the containers directly.
@@ -34,7 +34,6 @@ These environment variables are used by the `docker-compose.yml` file and do **N
| `TZ` | Timezone | <sup>\*1</sup> | server | microservices |
| `IMMICH_ENV` | Environment (production, development) | `production` | server, machine learning | api, microservices |
| `IMMICH_LOG_LEVEL` | Log level (verbose, debug, log, warn, error) | `log` | server, machine learning | api, microservices |
| `IMMICH_LOG_FORMAT` | Log output format (`console`, `json`) | `console` | server | api, microservices |
| `IMMICH_MEDIA_LOCATION` | Media location inside the container ⚠️**You probably shouldn't set this**<sup>\*2</sup>⚠️ | `/data` | server | api, microservices |
| `IMMICH_CONFIG_FILE` | Path to config file | | server | api, microservices |
| `NO_COLOR` | Set to `true` to disable color-coded log output | `false` | server, machine learning | |
@@ -44,7 +43,6 @@ These environment variables are used by the `docker-compose.yml` file and do **N
| `IMMICH_PROCESS_INVALID_IMAGES` | When `true`, generate thumbnails for invalid images | | server | microservices |
| `IMMICH_TRUSTED_PROXIES` | List of comma-separated IPs set as trusted proxies | | server | api |
| `IMMICH_IGNORE_MOUNT_CHECK_ERRORS` | See [System Integrity](/administration/system-integrity) | | server | api, microservices |
| `IMMICH_ALLOW_SETUP` | When `false` disables the `/auth/admin-sign-up` endpoint | `true` | server | api |
\*1: `TZ` should be set to a `TZ identifier` from [this list][tz-list]. For example, `TZ="Etc/UTC"`.
`TZ` is used by `exiftool` as a fallback in case the timezone cannot be determined from the image metadata. It is also used for logfile timestamps and cron job execution.
@@ -64,10 +62,10 @@ Information on the current workers can be found [here](/administration/jobs-work
## Ports
| Variable | Description | Default | Containers |
| :------------ | :------------- | :----------------------------------------: | :----------------------- |
| `IMMICH_HOST` | Listening host | `0.0.0.0` | server, machine learning |
| `IMMICH_PORT` | Listening port | `2283` (server), `3003` (machine learning) | server, machine learning |
| Variable | Description | Default |
| :------------ | :------------- | :----------------------------------------: |
| `IMMICH_HOST` | Listening host | `0.0.0.0` |
| `IMMICH_PORT` | Listening port | `2283` (server), `3003` (machine learning) |
## Database
@@ -82,7 +80,7 @@ Information on the current workers can be found [here](/administration/jobs-work
| `DB_SSL_MODE` | Database SSL mode | | server |
| `DB_VECTOR_EXTENSION`<sup>\*2</sup> | Database vector extension (one of [`vectorchord`, `pgvector`, `pgvecto.rs`]) | | server |
| `DB_SKIP_MIGRATIONS` | Whether to skip running migrations on startup (one of [`true`, `false`]) | `false` | server |
| `DB_STORAGE_TYPE` | Optimize concurrent IO on SSDs or sequential IO on HDDs ([`SSD`, `HDD`])<sup>\*3</sup> | `SSD` | database |
| `DB_STORAGE_TYPE` | Optimize concurrent IO on SSDs or sequential IO on HDDs ([`SSD`, `HDD`])<sup>\*3</sup> | `SSD` | server |
\*1: The values of `DB_USERNAME`, `DB_PASSWORD`, and `DB_DATABASE_NAME` are passed to the Postgres container as the variables `POSTGRES_USER`, `POSTGRES_PASSWORD`, and `POSTGRES_DB` in `docker-compose.yml`.
@@ -95,7 +93,7 @@ Information on the current workers can be found [here](/administration/jobs-work
All `DB_` variables must be provided to all Immich workers, including `api` and `microservices`.
`DB_URL` must be in the format `postgresql://immichdbusername:immichdbpassword@postgreshost:postgresport/immichdatabasename`.
You can require SSL by adding `?sslmode=require` to the end of the `DB_URL` string, or require SSL and skip certificate verification by adding `?sslmode=require&uselibpqcompat=true`. This allows both immich and `pg_dumpall` (the utility used for database backups) to [properly connect](https://github.com/brianc/node-postgres/tree/master/packages/pg-connection-string#tcp-connections) to your database.
You can require SSL by adding `?sslmode=require` to the end of the `DB_URL` string, or require SSL and skip certificate verification by adding `?sslmode=require&sslmode=no-verify`.
When `DB_URL` is defined, the `DB_HOSTNAME`, `DB_PORT`, `DB_USERNAME`, `DB_PASSWORD` and `DB_DATABASE_NAME` database variables are ignored.
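As a rough illustration of the expected shape (the helper and the values below are hypothetical, not part of Immich), a `DB_URL` in the documented format could be assembled like this:

```typescript
// Hypothetical helper that builds a DB_URL in the documented format.
const buildDbUrl = (opts: {
  username: string;
  password: string;
  host: string;
  port: number;
  database: string;
  requireSsl?: boolean;
}): string => {
  const base = `postgresql://${opts.username}:${opts.password}@${opts.host}:${opts.port}/${opts.database}`;
  return opts.requireSsl ? `${base}?sslmode=require` : base;
};

console.log(buildDbUrl({ username: 'immich', password: 'secret', host: 'database', port: 5432, database: 'immich', requireSsl: true }));
// postgresql://immich:secret@database:5432/immich?sslmode=require
```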
@@ -151,31 +149,29 @@ Redis (Sentinel) URL example JSON before encoding:
## Machine Learning
| Variable | Description | Default | Containers |
| :---------------------------------------------------------- | :----------------------------------------------------------------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__OCR` | Set the maximum number of boxes that will be processed at once by the OCR model | `6` | machine learning |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spun up while inferencing. | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
| `MACHINE_LEARNING_OPENVINO_PRECISION` | If set to FP16, uses half-precision floating-point operations for faster inference with reduced accuracy (one of [`FP16`, `FP32`], applies only to OpenVINO) | `FP32` | machine learning |
| Variable | Description | Default | Containers |
| :---------------------------------------------------------- | :-------------------------------------------------------------------------------------------------- | :-----------------------------: | :--------------- |
| `MACHINE_LEARNING_MODEL_TTL` | Inactivity time (s) before a model is unloaded (disabled if \<= 0) | `300` | machine learning |
| `MACHINE_LEARNING_MODEL_TTL_POLL_S` | Interval (s) between checks for the model TTL (disabled if \<= 0) | `10` | machine learning |
| `MACHINE_LEARNING_CACHE_FOLDER` | Directory where models are downloaded | `/cache` | machine learning |
| `MACHINE_LEARNING_REQUEST_THREADS`<sup>\*1</sup> | Thread count of the request thread pool (disabled if \<= 0) | number of CPU cores | machine learning |
| `MACHINE_LEARNING_MODEL_INTER_OP_THREADS` | Number of parallel model operations | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_INTRA_OP_THREADS` | Number of threads for each model operation | `2` | machine learning |
| `MACHINE_LEARNING_WORKERS`<sup>\*2</sup> | Number of worker processes to spawn | `1` | machine learning |
| `MACHINE_LEARNING_HTTP_KEEPALIVE_TIMEOUT_S`<sup>\*3</sup> | HTTP Keep-alive time in seconds | `2` | machine learning |
| `MACHINE_LEARNING_WORKER_TIMEOUT` | Maximum time (s) of unresponsiveness before a worker is killed | `120` (`300` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__TEXTUAL` | Comma-separated list of (textual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__CLIP__VISUAL` | Comma-separated list of (visual) CLIP model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__RECOGNITION` | Comma-separated list of (recognition) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_PRELOAD__FACIAL_RECOGNITION__DETECTION` | Comma-separated list of (detection) facial recognition model(s) to preload and cache | | machine learning |
| `MACHINE_LEARNING_ANN` | Enable ARM-NN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_ANN_FP16_TURBO` | Execute operations in FP16 precision: increasing speed, reducing precision (applies only to ARM-NN) | `False` | machine learning |
| `MACHINE_LEARNING_ANN_TUNING_LEVEL` | ARM-NN GPU tuning level (1: rapid, 2: normal, 3: exhaustive) | `2` | machine learning |
| `MACHINE_LEARNING_DEVICE_IDS`<sup>\*4</sup> | Device IDs to use in multi-GPU environments | `0` | machine learning |
| `MACHINE_LEARNING_MAX_BATCH_SIZE__FACIAL_RECOGNITION` | Set the maximum number of faces that will be processed at once by the facial recognition model | None (`1` if using OpenVINO) | machine learning |
| `MACHINE_LEARNING_RKNN` | Enable RKNN hardware acceleration if supported | `True` | machine learning |
| `MACHINE_LEARNING_RKNN_THREADS` | How many threads of RKNN runtime should be spinned up while inferencing. | `1` | machine learning |
| `MACHINE_LEARNING_MODEL_ARENA` | Pre-allocates CPU memory to avoid memory fragmentation | true | machine learning |
\*1: It is recommended to begin with this parameter when changing the concurrency levels of the machine learning service and then tune the other ones.

View File

@@ -17,17 +17,12 @@ Hardware and software requirements for Immich:
- Immich runs well in a virtualized environment when running in a full virtual machine.
The use of Docker in LXC containers is [not recommended](https://pve.proxmox.com/wiki/Linux_Container), but may be possible for advanced users.
If you have issues, we recommend that you switch to a supported VM deployment.
- **RAM**: Minimum 6GB, recommended 8GB.
- **RAM**: Minimum 4GB, recommended 6GB.
- **CPU**: Minimum 2 cores, recommended 4 cores.
- **Storage**: Recommended Unix-compatible filesystem (EXT4, ZFS, APFS, etc.) with support for user/group ownership and permissions.
- The generation of thumbnails and transcoded video can increase the size of the photo library by 10-20% on average.
:::note RAM requirements
For a smooth experience, especially during asset upload, Immich requires at least 6GB of RAM.
For systems with only 4GB of RAM, Immich can be run with machine learning features disabled.
:::
:::tip Postgres setup
:::tip
Good performance and a stable connection to the Postgres database are critical to a smooth Immich experience.
The Postgres database files are typically between 1-3 GB in size.
For this reason, the Postgres database (`DB_DATA_LOCATION`) should ideally use local SSD storage, and never a network share of any kind.

View File

@@ -10,7 +10,7 @@ to install and use it.
## Requirements
- A system with at least 6GB of RAM and 2 CPU cores.
- A system with at least 4GB of RAM and 2 CPU cores.
- [Docker](https://docs.docker.com/engine/install/)
> For a more detailed list of requirements, see the [requirements page](/install/requirements).
@@ -63,9 +63,9 @@ The backup time differs depending on how many photos are on your mobile device.
take quite a while.
To quickly get going, you can selectively upload a few photos first by following this [guide](/features/mobile-app#sync-only-selected-photos).
You can select the **Job Queues** tab to see Immich processing your photos.
You can select the **Jobs** tab to see Immich processing your photos.
<img src={require('/docs/guides/img/jobs-tab.webp').default} title="Job Queues Tab" width={300} />
<img src={require('/docs/guides/img/jobs-tab.webp').default} title="Jobs tab" width={300} />
---

View File

@@ -6,4 +6,4 @@
<img src={require('./img/album-selection.webp').default} width='50%' title='Backup button' />
3. Scroll down to the bottom and press "**Enable Backup**" to start the backup process. This will upload all the assets in the selected albums.
3. Scroll down to the bottom and press "**Start Backup**" to start the backup process. This will upload all the assets in the selected albums.

Some files were not shown because too many files have changed in this diff.