Compare commits

...

7 Commits

Author SHA1 Message Date
mertalev
e176917e8d add file sharing permission 2026-04-08 17:47:36 -04:00
mertalev
edbae35ea0 thumbnail bench 2026-04-08 17:45:02 -04:00
Luis Nachtigall
2b0f6c9202 fix(mobile): improve image load cancellation handling (#27624)
fix(image): improve image load cancellation handling
2026-04-08 17:23:42 -04:00
André Erasmus
55ab8c65b6 fix(server): avoid false restore failures on large database imports (#27420)
* fix(server): increase restore health check timeout and reject with Error

* chore: clean up

---------

Co-authored-by: André Erasmus <25480506+NoBadDays@users.noreply.github.com>
Co-authored-by: Jason Rasmussen <jason@rasm.me>
2026-04-08 16:03:41 -04:00
Cullen Jennings
781d568f29 fix(docs): typo 'Start rating' to 'Star rating' (#27606) 2026-04-08 18:25:45 +00:00
Zack Pollard
6a361dae72 fix(server): use randomized cron for version check scheduling (#27626)
Also removes unnecessary rate limit
2026-04-08 19:15:38 +01:00
renovate[bot]
64766c8c06 chore(deps): update github-actions (#27560)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2026-04-08 17:32:54 +02:00
23 changed files with 335 additions and 71 deletions

View File

@@ -210,7 +210,7 @@ jobs:
working-directory: ./mobile
- name: Setup Ruby
uses: ruby/setup-ruby@c515ec17f69368147deb311832da000dd229d338 # v1.297.0
uses: ruby/setup-ruby@3ff19f5e2baf30647122352b96108b1fbe250c64 # v1.299.0
with:
ruby-version: '3.3'
bundler-cache: true

View File

@@ -89,7 +89,7 @@ jobs:
uses: docker/setup-buildx-action@4d04d5d9486b7bd6fa91e7baf45bbb4f8b9deedd # v4.0.0
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
registry: ghcr.io

View File

@@ -35,7 +35,7 @@ jobs:
needs: [get_body, should_run]
if: ${{ needs.should_run.outputs.should_run == 'true' }}
container:
image: ghcr.io/immich-app/mdq:main@sha256:df7188ba88abb0800d73cc97d3633280f0c0c3d4c441d678225067bf154150fb
image: ghcr.io/immich-app/mdq:main@sha256:557cca601891b8b7d78b940071d35aaf7aaeb9b327d19b22cf282118edbc5272
outputs:
checked: ${{ steps.get_checkbox.outputs.checked }}
steps:

View File

@@ -57,7 +57,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/init@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
@@ -70,7 +70,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/autobuild@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
@@ -83,6 +83,6 @@ jobs:
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@38697555549f1db7851b81482ff19f1fa5c4fedc # v4.34.1
uses: github/codeql-action/analyze@c10b8064de6f491fea524254123dbe5e09572f13 # v4.35.1
with:
category: '/language:${{matrix.language}}'

View File

@@ -60,7 +60,7 @@ jobs:
suffix: ['', '-cuda', '-rocm', '-openvino', '-armnn', '-rknn']
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
@@ -90,7 +90,7 @@ jobs:
suffix: ['']
steps:
- name: Login to GitHub Container Registry
uses: docker/login-action@b45d80f862d83dbcd57f89517bcf500b2ab88fb2 # v4.0.0
uses: docker/login-action@4907a6ddec9925e35a0a9e82d7399ccc52663121 # v4.1.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}

View File

@@ -19,7 +19,7 @@ jobs:
app-id: ${{ secrets.PUSH_O_MATIC_APP_ID }}
private-key: ${{ secrets.PUSH_O_MATIC_APP_KEY }}
- uses: mshick/add-pr-comment@ffd016c7e151d97d69d21a843022fd4cd5b96fe5 # v3.9.0
- uses: mshick/add-pr-comment@64b8e914979889d746c99dea15a76e77ef64580a # v3.10.0
with:
github-token: ${{ steps.token.outputs.token }}
message-id: 'preview-status'
@@ -48,14 +48,14 @@ jobs:
name: 'preview'
})
- uses: mshick/add-pr-comment@ffd016c7e151d97d69d21a843022fd4cd5b96fe5 # v3.9.0
- uses: mshick/add-pr-comment@64b8e914979889d746c99dea15a76e77ef64580a # v3.10.0
if: ${{ github.event.pull_request.head.repo.fork }}
with:
github-token: ${{ steps.token.outputs.token }}
message-id: 'preview-status'
message: 'PRs from forks cannot have preview environments.'
- uses: mshick/add-pr-comment@ffd016c7e151d97d69d21a843022fd4cd5b96fe5 # v3.9.0
- uses: mshick/add-pr-comment@64b8e914979889d746c99dea15a76e77ef64580a # v3.10.0
if: ${{ !github.event.pull_request.head.repo.fork }}
with:
github-token: ${{ steps.token.outputs.token }}

View File

@@ -26,7 +26,7 @@ You can search the following types of content:
| Time frame | Start and end date of a specific time bucket |
| Media type | Image or video or both |
| Display options | In Archive, in Favorites or Not in any album |
| Start rating | User-assigned start rating |
| Star rating | User-assigned star rating |
<img src={require('./img/advanced-search-filters.webp').default} width="70%" title='Advanced search filters' />

View File

@@ -146,7 +146,7 @@ class URLSessionManager: NSObject {
private static func buildSession(delegate: URLSessionManagerDelegate) -> URLSession {
let config = URLSessionConfiguration.default
config.urlCache = urlCache
// config.urlCache = urlCache
config.httpCookieStorage = cookieStorage
config.httpMaximumConnectionsPerHost = 64
config.timeoutIntervalForRequest = 60

View File

@@ -119,7 +119,9 @@
<key>LSRequiresIPhoneOS</key>
<true/>
<key>LSSupportsOpeningDocumentsInPlace</key>
<string>No</string>
<true/>
<key>UIFileSharingEnabled</key>
<true/>
<key>MGLMapboxMetricsEnabledSettingShownInApp</key>
<true/>
<key>NSAppTransportSecurity</key>

View File

@@ -147,7 +147,7 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
}
void _onAssetInit(Duration timeStamp) {
_preloader.preload(widget.initialIndex, context.sizeData);
// _preloader.preload(widget.initialIndex, context.sizeData);
_handleCasting();
}
@@ -158,7 +158,7 @@ class _AssetViewerState extends ConsumerState<AssetViewer> {
if (asset == null) return;
AssetViewer._setAsset(ref, asset);
_preloader.preload(index, context.sizeData);
// _preloader.preload(index, context.sizeData);
_handleCasting();
_stackChildrenKeepAlive?.close();
_stackChildrenKeepAlive = ref.read(stackChildrenNotifier(asset).notifier).ref.keepAlive();

View File

@@ -19,6 +19,7 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
static final _log = Logger('CancellableImageProviderMixin');
bool isCancelled = false;
bool isFinished = false;
ImageRequest? request;
CancelableOperation<ImageInfo?>? cachedOperation;
@@ -53,13 +54,15 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
Stream<ImageInfo> loadRequest(ImageRequest request, ImageDecoderCallback decode, {bool evictOnError = true}) async* {
if (isCancelled) {
this.request = null;
PaintingBinding.instance.imageCache.evict(this);
return;
}
try {
final image = await request.load(decode);
if ((image == null && evictOnError) || isCancelled) {
if (isCancelled) {
return;
}
if (image == null && evictOnError) {
PaintingBinding.instance.imageCache.evict(this);
return;
} else if (image == null) {
@@ -67,6 +70,9 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
}
yield image;
} catch (e, stack) {
if (isCancelled) {
return;
}
if (evictOnError) {
PaintingBinding.instance.imageCache.evict(this);
rethrow;
@@ -80,20 +86,24 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
Future<ui.Codec?> loadCodecRequest(ImageRequest request) async {
if (isCancelled) {
this.request = null;
PaintingBinding.instance.imageCache.evict(this);
return null;
}
try {
final codec = await request.loadCodec();
if (codec == null || isCancelled) {
if (isCancelled) {
codec?.dispose();
return null;
}
if (codec == null) {
PaintingBinding.instance.imageCache.evict(this);
return null;
}
return codec;
} catch (e) {
PaintingBinding.instance.imageCache.evict(this);
if (!isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
}
rethrow;
} finally {
this.request = null;
@@ -121,6 +131,8 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
@override
void cancel() {
isCancelled = true;
final hasActiveWork = !isFinished;
final request = this.request;
if (request != null) {
this.request = null;
@@ -132,6 +144,10 @@ mixin CancellableImageProviderMixin<T extends Object> on CancellableImageProvide
cachedOperation = null;
operation.cancel();
}
if (hasActiveWork) {
PaintingBinding.instance.imageCache.evict(this);
}
}
}

View File

@@ -100,7 +100,6 @@ class LocalFullImageProvider extends CancellableImageProvider<LocalFullImageProv
yield* initialImageStream();
if (isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
}
@@ -113,24 +112,24 @@ class LocalFullImageProvider extends CancellableImageProvider<LocalFullImageProv
yield* loadRequest(request, decode);
if (!Store.get(StoreKey.loadOriginal, false)) {
isFinished = true;
return;
}
if (isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
}
request = this.request = LocalImageRequest(localId: key.id, assetType: key.assetType, size: Size.zero);
yield* loadRequest(request, decode);
isFinished = true;
}
Stream<Object> _animatedCodec(LocalFullImageProvider key, ImageDecoderCallback decode) async* {
yield* initialImageStream();
if (isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
}
@@ -143,7 +142,6 @@ class LocalFullImageProvider extends CancellableImageProvider<LocalFullImageProv
yield* loadRequest(previewRequest, decode);
if (isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
}
@@ -151,9 +149,11 @@ class LocalFullImageProvider extends CancellableImageProvider<LocalFullImageProv
final originalRequest = request = LocalImageRequest(localId: key.id, size: Size.zero, assetType: key.assetType);
final codec = await loadCodecRequest(originalRequest);
if (codec == null) {
if (isCancelled) return;
throw StateError('Failed to load animated codec for local asset ${key.id}');
}
yield codec;
isFinished = true;
}
@override

View File

@@ -105,7 +105,6 @@ class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImagePr
yield* initialImageStream();
if (isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
}
@@ -116,23 +115,23 @@ class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImagePr
yield* loadRequest(previewRequest, decode, evictOnError: !loadOriginal);
if (!loadOriginal) {
isFinished = true;
return;
}
if (isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
}
final originalRequest = request = RemoteImageRequest(uri: getOriginalUrlForRemoteId(key.assetId));
yield* loadRequest(originalRequest, decode);
isFinished = true;
}
Stream<Object> _animatedCodec(RemoteFullImageProvider key, ImageDecoderCallback decode) async* {
yield* initialImageStream();
if (isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
}
@@ -142,7 +141,6 @@ class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImagePr
yield* loadRequest(previewRequest, decode, evictOnError: false);
if (isCancelled) {
PaintingBinding.instance.imageCache.evict(this);
return;
}
@@ -150,9 +148,13 @@ class RemoteFullImageProvider extends CancellableImageProvider<RemoteFullImagePr
final originalRequest = request = RemoteImageRequest(uri: getOriginalUrlForRemoteId(key.assetId));
final codec = await loadCodecRequest(originalRequest);
if (codec == null) {
if (isCancelled) {
return;
}
throw StateError('Failed to load animated codec for asset ${key.assetId}');
}
yield codec;
isFinished = true;
}
@override

View File

@@ -8,11 +8,16 @@ import 'package:immich_mobile/presentation/widgets/images/image_provider.dart';
import 'package:immich_mobile/presentation/widgets/images/remote_image_provider.dart';
import 'package:immich_mobile/presentation/widgets/images/thumb_hash_provider.dart';
import 'package:immich_mobile/presentation/widgets/timeline/constants.dart';
import 'package:immich_mobile/utils/image_load_histogram.dart';
import 'package:logging/logging.dart';
final log = Logger('ThumbnailWidget');
enum ThumbhashMode { enabled, disabled, only }
enum ImageType { thumbnail }
final remoteImageHistogram = Histogram<ImageType>(maxSamples: 8192, values: ImageType.values);
int thumbnailId = 0;
class Thumbnail extends StatefulWidget {
final ImageProvider? imageProvider;
@@ -111,8 +116,11 @@ class _ThumbnailState extends State<Thumbnail> with SingleTickerProviderStateMix
if (imageProvider == null) return;
final imageStream = _imageStream = imageProvider.resolve(ImageConfiguration.empty);
final stopwatch = Stopwatch();
final curThumbnailId = thumbnailId++;
final imageStreamListener = _imageStreamListener = ImageStreamListener(
(ImageInfo imageInfo, bool synchronousCall) {
stopwatch.stop();
_stopListeningToThumbhashStream();
if (!mounted) {
imageInfo.dispose();
@@ -123,7 +131,27 @@ class _ThumbnailState extends State<Thumbnail> with SingleTickerProviderStateMix
return;
}
if ((synchronousCall && _providerImage == null) || !_isVisible()) {
final renderObject = context.findRenderObject() as RenderBox?;
final double topLeft;
final double bottomRight;
final double contextHeight = context.height;
if (renderObject == null || !renderObject.attached) {
topLeft = double.maxFinite;
bottomRight = double.maxFinite;
} else {
topLeft = renderObject.localToGlobal(Offset.zero).dy;
bottomRight = renderObject.localToGlobal(Offset(renderObject.size.width, renderObject.size.height)).dy;
}
remoteImageHistogram.record(
ImageType.thumbnail,
stopwatch.elapsedMicroseconds,
topLeft.toInt(),
bottomRight.toInt(),
contextHeight.toInt(),
curThumbnailId,
);
if ((synchronousCall && _providerImage == null) || !(topLeft < contextHeight && bottomRight > 0)) {
_fadeController.value = 1.0;
} else if (_fadeController.isAnimating) {
_fadeController.forward();
@@ -146,6 +174,7 @@ class _ThumbnailState extends State<Thumbnail> with SingleTickerProviderStateMix
_stopListeningToImageStream();
},
);
stopwatch.start();
imageStream.addListener(imageStreamListener);
}

View File

@@ -7,6 +7,7 @@ import 'package:flutter/foundation.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:flutter/rendering.dart';
import 'package:flutter/scheduler.dart';
import 'package:hooks_riverpod/hooks_riverpod.dart';
import 'package:immich_mobile/domain/models/asset/base_asset.model.dart';
import 'package:immich_mobile/domain/models/events.model.dart';
@@ -17,6 +18,7 @@ import 'package:immich_mobile/extensions/asyncvalue_extensions.dart';
import 'package:immich_mobile/extensions/build_context_extensions.dart';
import 'package:immich_mobile/presentation/widgets/action_buttons/download_status_floating_button.widget.dart';
import 'package:immich_mobile/presentation/widgets/bottom_sheet/general_bottom_sheet.widget.dart';
import 'package:immich_mobile/presentation/widgets/images/thumbnail.widget.dart';
import 'package:immich_mobile/presentation/widgets/timeline/constants.dart';
import 'package:immich_mobile/presentation/widgets/timeline/scrubber.widget.dart';
import 'package:immich_mobile/presentation/widgets/timeline/segment.model.dart';
@@ -140,10 +142,14 @@ class _SliverTimeline extends ConsumerStatefulWidget {
ConsumerState createState() => _SliverTimelineState();
}
class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
class _SliverTimelineState extends ConsumerState<_SliverTimeline> with SingleTickerProviderStateMixin {
late final ScrollController _scrollController;
StreamSubscription? _eventSubscription;
Ticker? _autoScrollTicker;
Duration _lastTickTime = Duration.zero;
static const _autoScrollVelocity = 4800.0; // pixels per second
// Drag selection state
bool _dragging = false;
TimelineAssetIndex? _dragAnchorIndex;
@@ -246,11 +252,52 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
@override
void dispose() {
_stopAutoScroll();
_scrollController.dispose();
_eventSubscription?.cancel();
super.dispose();
}
void _toggleAutoScroll() {
if (_autoScrollTicker?.isActive ?? false) {
_stopAutoScroll();
} else {
_startAutoScroll();
}
}
void _startAutoScroll() {
_lastTickTime = Duration.zero;
_autoScrollTicker = createTicker(_onAutoScrollTick)..start();
}
void _stopAutoScroll() {
_autoScrollTicker?.stop();
_autoScrollTicker?.dispose();
_autoScrollTicker = null;
}
void _onAutoScrollTick(Duration elapsed) {
if (_lastTickTime == Duration.zero) {
_lastTickTime = elapsed;
return;
}
final deltaSeconds = (elapsed - _lastTickTime).inMicroseconds / 1000000.0;
_lastTickTime = elapsed;
final newOffset = _scrollController.offset + (_autoScrollVelocity * deltaSeconds);
final maxOffset = _scrollController.position.maxScrollExtent;
if (newOffset >= maxOffset || remoteImageHistogram.count(ImageType.thumbnail) >= remoteImageHistogram.maxSamples) {
_scrollController.jumpTo(newOffset.clamp(0, maxOffset));
_stopAutoScroll();
remoteImageHistogram.logAll();
remoteImageHistogram.save();
} else {
_scrollController.jumpTo(newOffset);
}
}
void _scrollToDate(DateTime date) {
final asyncSegments = ref.read(timelineSegmentProvider);
asyncSegments.whenData((segments) {
@@ -434,6 +481,16 @@ class _SliverTimelineState extends ConsumerState<_SliverTimeline> {
controller: _scrollController,
child: RawGestureDetector(
gestures: {
SerialTapGestureRecognizer: GestureRecognizerFactoryWithHandlers<SerialTapGestureRecognizer>(
() => SerialTapGestureRecognizer(),
(SerialTapGestureRecognizer tap) {
tap.onSerialTapDown = (details) {
if (details.count == 3) {
_toggleAutoScroll();
}
};
},
),
CustomScaleGestureRecognizer: GestureRecognizerFactoryWithHandlers<CustomScaleGestureRecognizer>(
() => CustomScaleGestureRecognizer(),
(CustomScaleGestureRecognizer scale) {

View File

@@ -0,0 +1,154 @@
import 'dart:convert';
import 'dart:io';
import 'dart:math';
import 'dart:typed_data';
import 'package:logging/logging.dart';
import 'package:path_provider/path_provider.dart';
import 'package:share_plus/share_plus.dart';
/// Ring buffer histogram for performance profiling.
///
/// Stores up to [maxSamples] records per enum value of [T] in one flat
/// [Int64List]; once a bucket is full, new samples overwrite the oldest
/// (ring semantics). Each record is six int64 fields laid out contiguously:
/// duration (µs), timestamp (µs since construction, from [_clock]),
/// topLeft, bottomRight, contextHeight, and a caller-supplied id.
class Histogram<T extends Enum> {
  // Slots per enum value; must be a power of two so the wrap-around in
  // `record` can use `& _strideMask` instead of a modulo.
  final int _stride;
  // Precomputed `_stride - 1` bitmask for the hot path.
  final int _strideMask;
  // Enum values addressed by `Enum.index`; parallel to `_counts`.
  final List<T> _values;
  // Total samples ever recorded per enum value (monotonic, never clamped;
  // readers clamp to `_stride` themselves).
  final Int64List _counts;
  // Flat backing store: values.length buckets × _stride slots × 6 fields.
  final Int64List _data;
  // Started at construction; supplies the per-sample timestamp field.
  final Stopwatch _clock;
  static final _log = Logger('Histogram');
  Histogram({required int maxSamples, required List<T> values})
      : assert(maxSamples & (maxSamples - 1) == 0, 'maxSamples must be power of 2'),
        _stride = maxSamples,
        _strideMask = maxSamples - 1,
        _values = values,
        _counts = Int64List(values.length),
        _data = Int64List(maxSamples * values.length * 6),
        _clock = Stopwatch()..start();
  /// Records one six-field sample for [type].
  ///
  /// Hot path: inlined, bounds checks disabled, allocation-free. The target
  /// slot wraps via bitmask, silently overwriting the oldest sample once the
  /// bucket is full.
  @pragma("vm:prefer-inline")
  @pragma("vm:unsafe:no-bounds-checks")
  void record(T type, int microseconds, int topLeft, int bottomRight, int contextHeight, int id) {
    final i = type.index;
    final count = _counts[i];
    // Ring-buffer wrap: valid because _stride is a power of two (asserted).
    final slot = count & _strideMask;
    final offset = (i * _stride + slot) * 6;
    _data[offset] = microseconds;
    _data[offset + 1] = _clock.elapsedMicroseconds;
    _data[offset + 2] = topLeft;
    _data[offset + 3] = bottomRight;
    _data[offset + 4] = contextHeight;
    _data[offset + 5] = id;
    _counts[i] = count + 1;
  }
  /// Number of samples currently stored for [type], capped at [maxSamples]
  /// (the raw counter keeps growing after the buffer wraps).
  int count(T type) => _counts[type.index].clamp(0, _stride);
  /// Per-enum-value capacity of the ring buffer.
  int get maxSamples => _stride;
  /// Logs avg/min/max and percentile summary statistics of the duration
  /// field for [type]; no-op when no samples were recorded.
  @pragma("vm:unsafe:no-bounds-checks")
  void log(T type) {
    final index = type.index;
    final total = _counts[index];
    // Only min(total, _stride) slots hold live data.
    final count = min(total, _stride);
    if (count == 0) return;
    final baseOffset = index * _stride * 6;
    // Gather just the duration field (every 6th entry) so it can be sorted
    // for percentile extraction without disturbing the backing store.
    final scratch = Int64List(count);
    for (int i = 0; i < count; i++) {
      scratch[i] = _data[baseOffset + i * 6];
    }
    scratch.sort();
    int sum = 0;
    for (int i = 0; i < count; i++) {
      sum += scratch[i];
    }
    // Durations are stored in µs; divide by 1000 for ms display.
    _log.info(
      '${type.name} (n=$total, sampled=$count) - '
      'Avg: ${(sum / count / 1000.0).toStringAsFixed(2)}ms, '
      'Min: ${(scratch[0] / 1000.0).toStringAsFixed(2)}ms, '
      'Max: ${(scratch[count - 1] / 1000.0).toStringAsFixed(2)}ms, '
      'P25: ${(_percentile(scratch, count, 0.25) / 1000.0).toStringAsFixed(2)}ms, '
      'P50: ${(_percentile(scratch, count, 0.50) / 1000.0).toStringAsFixed(2)}ms, '
      'P75: ${(_percentile(scratch, count, 0.75) / 1000.0).toStringAsFixed(2)}ms, '
      'P90: ${(_percentile(scratch, count, 0.90) / 1000.0).toStringAsFixed(2)}ms, '
      'P95: ${(_percentile(scratch, count, 0.95) / 1000.0).toStringAsFixed(2)}ms, '
      'P99: ${(_percentile(scratch, count, 0.99) / 1000.0).toStringAsFixed(2)}ms',
    );
  }
  /// Logs summary statistics for every enum value.
  void logAll() {
    for (final value in _values) {
      log(value);
    }
  }
  /// Returns the six per-sample fields for [type], split into parallel lists
  /// of equal length.
  ///
  /// NOTE: entries come back in storage-slot order; once the buffer has
  /// wrapped, that is not chronological order (newest samples overwrite the
  /// lowest slots first).
  @pragma("vm:unsafe:no-bounds-checks")
  (Int64List, Int64List, Int64List, Int64List, Int64List, Int64List) getSamples(T type) {
    final index = type.index;
    final count = min(_counts[index], _stride);
    final samples = Int64List(count);
    final timestamps = Int64List(count);
    final topLeft = Int64List(count);
    final bottomRight = Int64List(count);
    final contextHeight = Int64List(count);
    final id = Int64List(count);
    final baseOffset = index * _stride * 6;
    for (int i = 0; i < count; i++) {
      samples[i] = _data[baseOffset + i * 6];
      timestamps[i] = _data[baseOffset + i * 6 + 1];
      topLeft[i] = _data[baseOffset + i * 6 + 2];
      bottomRight[i] = _data[baseOffset + i * 6 + 3];
      contextHeight[i] = _data[baseOffset + i * 6 + 4];
      id[i] = _data[baseOffset + i * 6 + 5];
    }
    return (samples, timestamps, topLeft, bottomRight, contextHeight, id);
  }
  /// Dumps all recorded samples for every enum value to a timestamped JSON
  /// file in the app documents directory and returns that file.
  ///
  /// When [share] is true, also opens the platform share sheet for the file
  /// via share_plus. Int64List fields serialize as plain JSON int arrays
  /// (Int64List implements List<int>).
  @pragma("vm:unsafe:no-bounds-checks")
  Future<File> save({bool share = true}) async {
    final dir = await getApplicationDocumentsDirectory();
    // ':' is not a valid filename character on some platforms; swap for '-'.
    final timestamp = DateTime.now().toIso8601String().replaceAll(':', '-');
    final file = File('${dir.path}/samples_$timestamp.json');
    final data = {};
    for (int i = 0; i < _counts.length; i++) {
      final name = _values[i].name;
      final (samples, timestamps, topLeft, bottomRight, contextHeight, id) = getSamples(_values[i]);
      data['${name}_us'] = samples;
      data['${name}_ts'] = timestamps;
      data['${name}_top_left'] = topLeft;
      data['${name}_bottom_right'] = bottomRight;
      data['${name}_context_height'] = contextHeight;
      data['${name}_id'] = id;
    }
    data['timestamp'] = DateTime.now().toIso8601String();
    await file.writeAsString(jsonEncode(data));
    _log.info('Saved samples to ${file.path}');
    if (share) {
      await Share.shareXFiles([XFile(file.path)]);
    }
    return file;
  }
  /// Resets the sample counter for [type]; old slot data is left in place
  /// and will be overwritten by subsequent records.
  void reset(T type) {
    _counts[type.index] = 0;
  }
  /// Resets the sample counters for all enum values.
  void resetAll() {
    _counts.fillRange(0, _counts.length, 0);
  }
  /// Nearest-rank percentile over [sorted] (ascending), using the first
  /// [count] entries; index is rounded rather than interpolated.
  @pragma("vm:prefer-inline")
  int _percentile(Int64List sorted, int count, double p) {
    final idx = ((count - 1) * p).round();
    return sorted[idx];
  }
}

View File

@@ -848,6 +848,7 @@ export enum AssetVisibility {
export enum CronJob {
LibraryScan = 'LibraryScan',
NightlyJobs = 'NightlyJobs',
VersionCheck = 'VersionCheck',
}
export enum ApiTag {

View File

@@ -36,15 +36,17 @@ export class MaintenanceHealthRepository {
}
});
worker.on('exit', (code, signal) => reject(`Server health check failed, server exited with ${signal ?? code}`));
worker.on('error', (error) => reject(`Server health check failed, process threw: ${error}`));
worker.on('exit', (code, signal) =>
reject(new Error(`Server health check failed, server exited with ${signal ?? code}`)),
);
worker.on('error', (error) => reject(new Error(`Server health check failed, process threw: ${error}`)));
setTimeout(() => {
if (worker.exitCode === null) {
reject('Server health check failed, took too long to start.');
reject(new Error('Server health check failed, took too long to start.'));
worker.kill('SIGTERM');
}
}, 20_000);
}, 180_000);
});
}
}

View File

@@ -1,14 +1,11 @@
import { Injectable, NotAcceptableException } from '@nestjs/common';
import { Interval } from '@nestjs/schedule';
import { NextFunction, Request, Response } from 'express';
import { readFileSync } from 'node:fs';
import sanitizeHtml from 'sanitize-html';
import { ONE_HOUR } from 'src/constants';
import { ConfigRepository } from 'src/repositories/config.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
import { AuthService } from 'src/services/auth.service';
import { SharedLinkService } from 'src/services/shared-link.service';
import { VersionService } from 'src/services/version.service';
import { OpenGraphTags } from 'src/utils/misc';
export const render = (index: string, meta: OpenGraphTags) => {
@@ -40,18 +37,12 @@ export class ApiService {
constructor(
private authService: AuthService,
private sharedLinkService: SharedLinkService,
private versionService: VersionService,
private configRepository: ConfigRepository,
private logger: LoggingRepository,
) {
this.logger.setContext(ApiService.name);
}
@Interval(ONE_HOUR.as('milliseconds'))
async onVersionCheck() {
await this.versionService.handleQueueVersionCheck();
}
ssr(excludePaths: string[]) {
const { resourcePaths } = this.configRepository.getEnv();

View File

@@ -2,7 +2,7 @@ import { DateTime } from 'luxon';
import { SemVer } from 'semver';
import { defaults } from 'src/config';
import { serverVersion } from 'src/constants';
import { JobName, JobStatus, SystemMetadataKey } from 'src/enum';
import { CronJob, JobName, JobStatus, SystemMetadataKey } from 'src/enum';
import { VersionService } from 'src/services/version.service';
import { factory } from 'test/small.factory';
import { newTestService, ServiceMocks } from 'test/utils';
@@ -18,6 +18,8 @@ describe(VersionService.name, () => {
beforeEach(() => {
({ sut, mocks } = newTestService(VersionService));
mocks.cron.create.mockResolvedValue();
mocks.cron.update.mockResolvedValue();
});
it('should work', () => {
@@ -44,6 +46,20 @@ describe(VersionService.name, () => {
await expect(sut.onBootstrap()).resolves.toBeUndefined();
expect(mocks.versionHistory.create).not.toHaveBeenCalled();
});
it('should create a version check cron job', async () => {
mocks.versionHistory.getLatest.mockResolvedValue({
id: 'version-1',
createdAt: new Date(),
version: serverVersion.toString(),
});
await sut.onBootstrap();
expect(mocks.cron.create).toHaveBeenCalledWith(
expect.objectContaining({
name: CronJob.VersionCheck,
}),
);
});
});
describe('getVersion', () => {
@@ -72,25 +88,13 @@ describe(VersionService.name, () => {
});
describe('handVersionCheck', () => {
it('should not run if the last check was < 60 minutes ago', async () => {
mocks.systemMetadata.get.mockResolvedValue({
checkedAt: DateTime.utc().minus({ minutes: 5 }).toISO(),
releaseVersion: '1.0.0',
});
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Skipped);
});
it('should not run if version check is disabled', async () => {
mocks.systemMetadata.get.mockResolvedValue({ newVersionCheck: { enabled: false } });
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Skipped);
});
it('should run if it has been > 60 minutes', async () => {
it('should run and notify if a new version is available', async () => {
mocks.serverInfo.getLatestRelease.mockResolvedValue(mockVersionResponse('v100.0.0'));
mocks.systemMetadata.get.mockResolvedValue({
checkedAt: DateTime.utc().minus({ minutes: 65 }).toISO(),
releaseVersion: '1.0.0',
});
await expect(sut.handleVersionCheck()).resolves.toEqual(JobStatus.Success);
expect(mocks.systemMetadata.set).toHaveBeenCalled();
expect(mocks.logger.log).toHaveBeenCalled();

View File

@@ -4,10 +4,11 @@ import semver, { SemVer } from 'semver';
import { serverVersion } from 'src/constants';
import { OnEvent, OnJob } from 'src/decorators';
import { ReleaseNotification, ServerVersionResponseDto } from 'src/dtos/server.dto';
import { DatabaseLock, JobName, JobStatus, QueueName, SystemMetadataKey } from 'src/enum';
import { CronJob, DatabaseLock, JobName, JobStatus, QueueName, SystemMetadataKey } from 'src/enum';
import { ArgOf } from 'src/repositories/event.repository';
import { BaseService } from 'src/services/base.service';
import { VersionCheckMetadata } from 'src/types';
import { handlePromiseError } from 'src/utils/misc';
const asNotification = ({ checkedAt, releaseVersion }: VersionCheckMetadata): ReleaseNotification => {
return {
@@ -24,6 +25,15 @@ export class VersionService extends BaseService {
async onBootstrap(): Promise<void> {
await this.handleVersionCheck();
const randomMinute = Math.floor(Math.random() * 60);
const expression = `${randomMinute} * * * *`;
this.logger.debug(`Scheduling version check for cron ${expression}`);
this.cronRepository.create({
name: CronJob.VersionCheck,
expression,
onTick: () => handlePromiseError(this.handleQueueVersionCheck(), this.logger),
});
await this.databaseRepository.withLock(DatabaseLock.VersionHistory, async () => {
const previous = await this.versionRepository.getLatest();
const current = serverVersion.toString();
@@ -76,16 +86,6 @@ export class VersionService extends BaseService {
return JobStatus.Skipped;
}
const versionCheck = await this.systemMetadataRepository.get(SystemMetadataKey.VersionCheckState);
if (versionCheck?.checkedAt) {
const lastUpdate = DateTime.fromISO(versionCheck.checkedAt);
const elapsedTime = DateTime.now().diff(lastUpdate).as('minutes');
// check once per hour (max)
if (elapsedTime < 60) {
return JobStatus.Skipped;
}
}
const { version: releaseVersion, published_at: publishedAt } = await this.serverInfoRepository.getLatestRelease();
const metadata: VersionCheckMetadata = { checkedAt: DateTime.utc().toISO(), releaseVersion };

View File

@@ -26,6 +26,7 @@ import { AssetEditRepository } from 'src/repositories/asset-edit.repository';
import { AssetJobRepository } from 'src/repositories/asset-job.repository';
import { AssetRepository } from 'src/repositories/asset.repository';
import { ConfigRepository } from 'src/repositories/config.repository';
import { CronRepository } from 'src/repositories/cron.repository';
import { CryptoRepository } from 'src/repositories/crypto.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { EmailRepository } from 'src/repositories/email.repository';
@@ -500,6 +501,10 @@ const newMockRepository = <T>(key: ClassConstructor<T>) => {
});
}
case CronRepository: {
return automock(CronRepository, { args: [undefined, { setContext: () => {} }], strict: false });
}
case EmailRepository: {
return automock(EmailRepository, { args: [{ setContext: () => {} }] });
}

View File

@@ -1,6 +1,7 @@
import { Kysely } from 'kysely';
import { serverVersion } from 'src/constants';
import { JobName } from 'src/enum';
import { CronRepository } from 'src/repositories/cron.repository';
import { DatabaseRepository } from 'src/repositories/database.repository';
import { JobRepository } from 'src/repositories/job.repository';
import { LoggingRepository } from 'src/repositories/logging.repository';
@@ -16,7 +17,7 @@ const setup = (db?: Kysely<DB>) => {
return newMediumService(VersionService, {
database: db || defaultDatabase,
real: [DatabaseRepository, VersionHistoryRepository],
mock: [LoggingRepository, JobRepository],
mock: [LoggingRepository, JobRepository, CronRepository],
});
};