[backend/drive] Refactor ImageProcessor into a modular system

This commit lays the groundwork for a user-configurable image processing pipeline. Behavior is identical to the old ImageProcessor; the logic is just split into modular, compartmentalized pieces. It also adds support for AVIF & JXL encoding, though no code paths call these encoders just yet.
Laura Hausmann 2024-08-07 02:21:31 +02:00
parent 0a10218254
commit c07bb35548
GPG key ID: D044E84C5BE01605
27 changed files with 809 additions and 488 deletions
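
For orientation, a minimal sketch (not part of the diff) of how DriveService is expected to drive the new pipeline, using the IdentifyImage/ProcessImage signatures and the ImageVersion/ImageFormat types introduced below. The method shell, variable names, and the chosen Webp settings are illustrative assumptions only.

// Illustrative sketch only: the types and member names come from this commit,
// everything else (method shell, variable names, chosen Webp settings) is assumed.
private async Task ProcessSketchAsync(ImageProcessor imageProcessor, byte[] buf, DriveFileCreationRequest request)
{
	var ident = imageProcessor.IdentifyImage(buf, request); // IImageInfo?; null if no processor can identify
	if (ident is null || ident.IsAnimated) return;          // animated images bypass re-encoding

	List<ImageVersion> formats =
	[
		new ImageVersion(ImageVersion.KeyEnum.Original,
		                 new ImageFormat.Keep(Path.GetExtension(request.Filename), request.MimeType)),
		// arguments follow the ImageFormat.Webp record signature declared in this commit
		new ImageVersion(ImageVersion.KeyEnum.Thumbnail,
		                 new ImageFormat.Webp(ImageFormat.Webp.Compression.Lossy, Quality: 75, TargetRes: 1000))
	];

	var res = imageProcessor.ProcessImage(buf, ident, request, formats);
	// res.Blurhash is null when no processor can generate blurhashes;
	// res.Width/Height come from the ProcessedImage : DriveFile.FileProperties base.

	// Encoding is lazy: each requested version maps to a Func<Stream>? that the caller
	// invokes and persists (cf. DriveService.ProcessAndStoreFileVersion).
	foreach (var (version, encode) in res.RequestedFormats)
	{
		if (encode == null) continue; // no registered processor can encode version.Format
		await using var stream = encode();
		// ... write the stream to local storage or object storage under an access key
	}
}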


@ -82,10 +82,10 @@ public class MediaController(DriveService driveSvc, DatabaseContext db) : Contro
{ {
Id = file.Id, Id = file.Id,
Type = AttachmentEntity.GetType(file.Type), Type = AttachmentEntity.GetType(file.Type),
Url = file.PublicUrl, Url = file.AccessUrl,
Blurhash = file.Blurhash, Blurhash = file.Blurhash,
Description = file.Comment, Description = file.Comment,
PreviewUrl = file.PublicThumbnailUrl, PreviewUrl = file.ThumbnailAccessUrl,
RemoteUrl = file.Uri, RemoteUrl = file.Uri,
Sensitive = file.IsSensitive Sensitive = file.IsSensitive
//Metadata = TODO, //Metadata = TODO,


@ -252,9 +252,9 @@ public class NoteRenderer(
.Select(f => new AttachmentEntity .Select(f => new AttachmentEntity
{ {
Id = f.Id, Id = f.Id,
Url = f.PublicUrl, Url = f.AccessUrl,
Blurhash = f.Blurhash, Blurhash = f.Blurhash,
PreviewUrl = f.PublicThumbnailUrl, PreviewUrl = f.ThumbnailAccessUrl,
Description = f.Comment, Description = f.Comment,
Metadata = null, Metadata = null,
RemoteUrl = f.Uri, RemoteUrl = f.Uri,
@ -272,9 +272,9 @@ public class NoteRenderer(
.Select(f => new AttachmentEntity .Select(f => new AttachmentEntity
{ {
Id = f.Id, Id = f.Id,
Url = f.PublicUrl, Url = f.AccessUrl,
Blurhash = f.Blurhash, Blurhash = f.Blurhash,
PreviewUrl = f.PublicThumbnailUrl, PreviewUrl = f.ThumbnailAccessUrl,
Description = f.Comment, Description = f.Comment,
Metadata = null, Metadata = null,
RemoteUrl = f.Uri, RemoteUrl = f.Uri,
@ -444,11 +444,11 @@ public class NoteRenderer(
public List<AccountEntity>? Accounts; public List<AccountEntity>? Accounts;
public List<AttachmentEntity>? Attachments; public List<AttachmentEntity>? Attachments;
public List<string>? BookmarkedNotes; public List<string>? BookmarkedNotes;
public List<string>? MutedNotes;
public List<EmojiEntity>? Emoji; public List<EmojiEntity>? Emoji;
public List<Filter>? Filters; public List<Filter>? Filters;
public List<string>? LikedNotes; public List<string>? LikedNotes;
public List<MentionEntity>? Mentions; public List<MentionEntity>? Mentions;
public List<string>? MutedNotes;
public List<string>? PinnedNotes; public List<string>? PinnedNotes;
public List<PollEntity>? Polls; public List<PollEntity>? Polls;
public List<ReactionEntity>? Reactions; public List<ReactionEntity>? Reactions;


@ -30,8 +30,8 @@ public class DriveController(
public async Task<IActionResult> GetFileByAccessKey(string accessKey) public async Task<IActionResult> GetFileByAccessKey(string accessKey)
{ {
var file = await db.DriveFiles.FirstOrDefaultAsync(p => p.AccessKey == accessKey || var file = await db.DriveFiles.FirstOrDefaultAsync(p => p.AccessKey == accessKey ||
p.WebpublicAccessKey == accessKey || p.PublicAccessKey == accessKey ||
p.ThumbnailAccessKey == accessKey); p.ThumbnailAccessUrl == accessKey);
if (file == null) if (file == null)
{ {
Response.Headers.CacheControl = "max-age=86400"; Response.Headers.CacheControl = "max-age=86400";
@ -110,8 +110,8 @@ public class DriveController(
return new DriveFileResponse return new DriveFileResponse
{ {
Id = file.Id, Id = file.Id,
Url = file.PublicUrl, Url = file.AccessUrl,
ThumbnailUrl = file.PublicThumbnailUrl, ThumbnailUrl = file.ThumbnailAccessUrl,
Filename = file.Name, Filename = file.Name,
ContentType = file.Type, ContentType = file.Type,
Description = file.Comment, Description = file.Comment,


@ -113,8 +113,8 @@ public class NoteRenderer(
return files.Select(p => new NoteAttachment return files.Select(p => new NoteAttachment
{ {
Id = p.Id, Id = p.Id,
Url = p.PublicUrl, Url = p.AccessUrl,
ThumbnailUrl = p.PublicThumbnailUrl, ThumbnailUrl = p.ThumbnailAccessUrl,
ContentType = p.Type, ContentType = p.Type,
Blurhash = p.Blurhash, Blurhash = p.Blurhash,
AltText = p.Comment, AltText = p.Comment,


@ -16,9 +16,9 @@ public static class Enums
public enum ImageProcessor public enum ImageProcessor
{ {
ImageSharp, None = 0,
LibVips, ImageSharp = 1,
None LibVips = 2
} }
public enum ItemVisibility public enum ItemVisibility


@ -88,8 +88,8 @@ public class DatabaseContext(DbContextOptions<DatabaseContext> options)
public virtual DbSet<CacheEntry> CacheStore { get; init; } = null!; public virtual DbSet<CacheEntry> CacheStore { get; init; } = null!;
public virtual DbSet<Job> Jobs { get; init; } = null!; public virtual DbSet<Job> Jobs { get; init; } = null!;
public virtual DbSet<Filter> Filters { get; init; } = null!; public virtual DbSet<Filter> Filters { get; init; } = null!;
public virtual DbSet<DataProtectionKey> DataProtectionKeys { get; init; } = null!;
public virtual DbSet<PluginStoreEntry> PluginStore { get; init; } = null!; public virtual DbSet<PluginStoreEntry> PluginStore { get; init; } = null!;
public virtual DbSet<DataProtectionKey> DataProtectionKeys { get; init; } = null!;
public static NpgsqlDataSource GetDataSource(Config.DatabaseSection config) public static NpgsqlDataSource GetDataSource(Config.DatabaseSection config)
{ {
@ -384,7 +384,7 @@ public class DatabaseContext(DbContextOptions<DatabaseContext> options)
entity.Property(e => e.Url).HasComment("The URL of the DriveFile."); entity.Property(e => e.Url).HasComment("The URL of the DriveFile.");
entity.Property(e => e.UserHost).HasComment("The host of owner. It will be null if the user in local."); entity.Property(e => e.UserHost).HasComment("The host of owner. It will be null if the user in local.");
entity.Property(e => e.UserId).HasComment("The owner ID."); entity.Property(e => e.UserId).HasComment("The owner ID.");
entity.Property(e => e.WebpublicUrl).HasComment("The URL of the webpublic of the DriveFile."); entity.Property(e => e.PublicUrl).HasComment("The URL of the webpublic of the DriveFile.");
entity.HasOne(d => d.Folder) entity.HasOne(d => d.Folder)
.WithMany(p => p.DriveFiles) .WithMany(p => p.DriveFiles)


@ -802,6 +802,22 @@ namespace Iceshrimp.Backend.Core.Database.Migrations
.HasDefaultValueSql("'{}'::jsonb") .HasDefaultValueSql("'{}'::jsonb")
.HasComment("The any properties of the DriveFile. For example, it includes image width/height."); .HasComment("The any properties of the DriveFile. For example, it includes image width/height.");
b.Property<string>("PublicAccessKey")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("webpublicAccessKey");
b.Property<string>("PublicMimeType")
.HasMaxLength(128)
.HasColumnType("character varying(128)")
.HasColumnName("webpublicType");
b.Property<string>("PublicUrl")
.HasMaxLength(512)
.HasColumnType("character varying(512)")
.HasColumnName("webpublicUrl")
.HasComment("The URL of the webpublic of the DriveFile.");
b.Property<Dictionary<string, string>>("RequestHeaders") b.Property<Dictionary<string, string>>("RequestHeaders")
.ValueGeneratedOnAdd() .ValueGeneratedOnAdd()
.HasColumnType("jsonb") .HasColumnType("jsonb")
@ -838,6 +854,11 @@ namespace Iceshrimp.Backend.Core.Database.Migrations
.HasColumnType("character varying(256)") .HasColumnType("character varying(256)")
.HasColumnName("thumbnailAccessKey"); .HasColumnName("thumbnailAccessKey");
b.Property<string>("ThumbnailMimeType")
.HasMaxLength(128)
.HasColumnType("character varying(128)")
.HasColumnName("thumbnailType");
b.Property<string>("ThumbnailUrl") b.Property<string>("ThumbnailUrl")
.HasMaxLength(512) .HasMaxLength(512)
.HasColumnType("character varying(512)") .HasColumnType("character varying(512)")
@ -876,22 +897,6 @@ namespace Iceshrimp.Backend.Core.Database.Migrations
.HasColumnName("userId") .HasColumnName("userId")
.HasComment("The owner ID."); .HasComment("The owner ID.");
b.Property<string>("WebpublicAccessKey")
.HasMaxLength(256)
.HasColumnType("character varying(256)")
.HasColumnName("webpublicAccessKey");
b.Property<string>("WebpublicType")
.HasMaxLength(128)
.HasColumnType("character varying(128)")
.HasColumnName("webpublicType");
b.Property<string>("WebpublicUrl")
.HasMaxLength(512)
.HasColumnType("character varying(512)")
.HasColumnName("webpublicUrl")
.HasComment("The URL of the webpublic of the DriveFile.");
b.HasKey("Id"); b.HasKey("Id");
b.HasIndex("AccessKey"); b.HasIndex("AccessKey");
@ -904,6 +909,8 @@ namespace Iceshrimp.Backend.Core.Database.Migrations
b.HasIndex("IsSensitive"); b.HasIndex("IsSensitive");
b.HasIndex("PublicAccessKey");
b.HasIndex("Sha256"); b.HasIndex("Sha256");
b.HasIndex("ThumbnailAccessKey"); b.HasIndex("ThumbnailAccessKey");
@ -916,8 +923,6 @@ namespace Iceshrimp.Backend.Core.Database.Migrations
b.HasIndex("UserId"); b.HasIndex("UserId");
b.HasIndex("WebpublicAccessKey");
b.HasIndex("UserId", "FolderId", "Id"); b.HasIndex("UserId", "FolderId", "Id");
b.ToTable("drive_file"); b.ToTable("drive_file");


@ -0,0 +1,36 @@
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore.Infrastructure;
#nullable disable
namespace Iceshrimp.Backend.Core.Database.Migrations
{
/// <inheritdoc />
[DbContext(typeof(DatabaseContext))]
[Migration("20240808010539_AddDriveFileThumbnailTypeColumn")]
public partial class AddDriveFileThumbnailTypeColumn : Migration
{
/// <inheritdoc />
protected override void Up(MigrationBuilder migrationBuilder)
{
migrationBuilder.AddColumn<string>(
name: "thumbnailType",
table: "drive_file",
type: "character varying(128)",
maxLength: 128,
nullable: true);
migrationBuilder.Sql("""
UPDATE "drive_file" SET "thumbnailType" = 'image/webp' WHERE "thumbnailAccessKey" IS NOT NULL AND "thumbnailUrl" IS NOT NULL;
""");
}
/// <inheritdoc />
protected override void Down(MigrationBuilder migrationBuilder)
{
migrationBuilder.DropColumn(
name: "thumbnailType",
table: "drive_file");
}
}
}


@ -14,7 +14,7 @@ namespace Iceshrimp.Backend.Core.Database.Tables;
[Index(nameof(Type))] [Index(nameof(Type))]
[Index(nameof(IsSensitive))] [Index(nameof(IsSensitive))]
[Index(nameof(FolderId))] [Index(nameof(FolderId))]
[Index(nameof(WebpublicAccessKey))] [Index(nameof(PublicAccessKey))]
[Index(nameof(CreatedAt))] [Index(nameof(CreatedAt))]
[Index(nameof(AccessKey))] [Index(nameof(AccessKey))]
[Index(nameof(Uri))] [Index(nameof(Uri))]
@ -101,7 +101,7 @@ public class DriveFile : IEntity
/// </summary> /// </summary>
[Column("webpublicUrl")] [Column("webpublicUrl")]
[StringLength(512)] [StringLength(512)]
public string? WebpublicUrl { get; set; } public string? PublicUrl { get; set; }
[Column("accessKey")] [Column("accessKey")]
[StringLength(256)] [StringLength(256)]
@ -113,7 +113,7 @@ public class DriveFile : IEntity
[Column("webpublicAccessKey")] [Column("webpublicAccessKey")]
[StringLength(256)] [StringLength(256)]
public string? WebpublicAccessKey { get; set; } public string? PublicAccessKey { get; set; }
/// <summary> /// <summary>
/// The URI of the DriveFile. it will be null when the DriveFile is local. /// The URI of the DriveFile. it will be null when the DriveFile is local.
@ -150,9 +150,13 @@ public class DriveFile : IEntity
[StringLength(128)] [StringLength(128)]
public string? Blurhash { get; set; } public string? Blurhash { get; set; }
[Column("thumbnailType")]
[StringLength(128)]
public string? ThumbnailMimeType { get; set; }
[Column("webpublicType")] [Column("webpublicType")]
[StringLength(128)] [StringLength(128)]
public string? WebpublicType { get; set; } public string? PublicMimeType { get; set; }
[Column("requestHeaders", TypeName = "jsonb")] [Column("requestHeaders", TypeName = "jsonb")]
public Dictionary<string, string>? RequestHeaders { get; set; } public Dictionary<string, string>? RequestHeaders { get; set; }
@ -184,8 +188,8 @@ public class DriveFile : IEntity
[InverseProperty(nameof(Tables.User.Banner))] [InverseProperty(nameof(Tables.User.Banner))]
public virtual User? UserBanner { get; set; } public virtual User? UserBanner { get; set; }
[NotMapped] public string PublicUrl => WebpublicUrl ?? Url; [NotMapped] public string AccessUrl => PublicUrl ?? Url;
[NotMapped] public string PublicThumbnailUrl => ThumbnailUrl ?? WebpublicUrl ?? Url; [NotMapped] public string ThumbnailAccessUrl => ThumbnailUrl ?? PublicUrl ?? Url;
[Key] [Key]
[Column("id")] [Column("id")]
@ -196,13 +200,5 @@ public class DriveFile : IEntity
{ {
[J("width")] public int? Width { get; set; } [J("width")] public int? Width { get; set; }
[J("height")] public int? Height { get; set; } [J("height")] public int? Height { get; set; }
[Obsolete("Deprecated property")]
[J("orientation")]
public int? Orientation { get; set; }
[Obsolete("Deprecated property")]
[J("avgColor")]
public string? AverageColor { get; set; }
} }
} }


@ -45,4 +45,7 @@ public static class EnumerableExtensions
} }
public static IEnumerable<T> NotNull<T>(this IEnumerable<T?> @enum) => @enum.OfType<T>(); public static IEnumerable<T> NotNull<T>(this IEnumerable<T?> @enum) => @enum.OfType<T>();
public static IEnumerable<T> StructNotNull<T>(this IEnumerable<T?> @enum) where T : struct =>
@enum.Where(p => p.HasValue).Select(p => p!.Value);
} }


@ -10,6 +10,7 @@ using Iceshrimp.Backend.Core.Federation.WebFinger;
using Iceshrimp.Backend.Core.Helpers.LibMfm.Conversion; using Iceshrimp.Backend.Core.Helpers.LibMfm.Conversion;
using Iceshrimp.Backend.Core.Middleware; using Iceshrimp.Backend.Core.Middleware;
using Iceshrimp.Backend.Core.Services; using Iceshrimp.Backend.Core.Services;
using Iceshrimp.Backend.Core.Services.ImageProcessing;
using Iceshrimp.Backend.SignalR.Authentication; using Iceshrimp.Backend.SignalR.Authentication;
using Iceshrimp.Shared.Configuration; using Iceshrimp.Shared.Configuration;
using Iceshrimp.Shared.Schemas.Web; using Iceshrimp.Shared.Schemas.Web;
@ -36,7 +37,7 @@ namespace Iceshrimp.Backend.Core.Extensions;
public static class ServiceExtensions public static class ServiceExtensions
{ {
public static void AddServices(this IServiceCollection services) public static void AddServices(this IServiceCollection services, IConfiguration configuration)
{ {
// Transient = instantiated per request and class // Transient = instantiated per request and class
@ -100,6 +101,24 @@ public static class ServiceExtensions
.AddSingleton<StreamingService>() .AddSingleton<StreamingService>()
.AddSingleton<ImageProcessor>(); .AddSingleton<ImageProcessor>();
var config = configuration.GetSection("Storage").Get<Config.StorageSection>() ??
throw new Exception("Failed to read storage config section");
switch (config.MediaProcessing.ImageProcessor)
{
case Enums.ImageProcessor.LibVips:
services.AddSingleton<IImageProcessor, VipsProcessor>();
services.AddSingleton<IImageProcessor, ImageSharpProcessor>();
break;
case Enums.ImageProcessor.ImageSharp:
services.AddSingleton<IImageProcessor, ImageSharpProcessor>();
break;
case Enums.ImageProcessor.None:
break;
default:
throw new ArgumentOutOfRangeException();
}
// Hosted services = long running background tasks // Hosted services = long running background tasks
// Note: These need to be added as a singleton as well to ensure data consistency // Note: These need to be added as a singleton as well to ensure data consistency
services.AddHostedService<CronService>(provider => provider.GetRequiredService<CronService>()); services.AddHostedService<CronService>(provider => provider.GetRequiredService<CronService>());


@ -104,7 +104,7 @@ public class NoteRenderer(IOptions<Config.InstanceSection> config, MfmConverter
.Select(p => new ASDocument .Select(p => new ASDocument
{ {
Sensitive = p.IsSensitive, Sensitive = p.IsSensitive,
Url = new ASLink(p.WebpublicUrl ?? p.Url), Url = new ASLink(p.AccessUrl),
MediaType = p.Type, MediaType = p.Type,
Description = p.Comment Description = p.Comment
}) })


@ -16,4 +16,9 @@ public static class DigestHelpers
var data = await SHA256.HashDataAsync(input); var data = await SHA256.HashDataAsync(input);
return Convert.ToHexString(data).ToLowerInvariant(); return Convert.ToHexString(data).ToLowerInvariant();
} }
public static async Task<string> Sha256DigestAsync(byte[] input)
{
return await Sha256DigestAsync(new MemoryStream(input));
}
} }


@ -74,7 +74,7 @@ public class BackgroundTaskQueue(int parallelism)
if (!file.IsLink && !deduplicated) if (!file.IsLink && !deduplicated)
{ {
string?[] paths = [file.AccessKey, file.ThumbnailAccessKey, file.WebpublicAccessKey]; string?[] paths = [file.AccessKey, file.ThumbnailAccessKey, file.PublicAccessKey];
if (file.StoredInternal) if (file.StoredInternal)
{ {
@ -114,9 +114,9 @@ public class BackgroundTaskQueue(int parallelism)
file.IsLink = true; file.IsLink = true;
file.Url = file.Uri; file.Url = file.Uri;
file.ThumbnailUrl = null; file.ThumbnailUrl = null;
file.WebpublicUrl = null; file.PublicUrl = null;
file.ThumbnailAccessKey = null; file.ThumbnailAccessKey = null;
file.WebpublicAccessKey = null; file.PublicAccessKey = null;
file.StoredInternal = false; file.StoredInternal = false;
await db.Users.Where(p => p.AvatarId == file.Id) await db.Users.Where(p => p.AvatarId == file.Id)
@ -133,7 +133,7 @@ public class BackgroundTaskQueue(int parallelism)
if (deduplicated) if (deduplicated)
return; return;
string?[] paths = [file.AccessKey, file.ThumbnailAccessKey, file.WebpublicAccessKey]; string?[] paths = [file.AccessKey, file.ThumbnailAccessKey, file.PublicAccessKey];
if (file.StoredInternal) if (file.StoredInternal)
{ {
var pathBase = scope.GetRequiredService<IOptions<Config.StorageSection>>().Value.Local?.Path ?? var pathBase = scope.GetRequiredService<IOptions<Config.StorageSection>>().Value.Local?.Path ??


@ -1,3 +1,5 @@
using System.Collections.Immutable;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis; using System.Diagnostics.CodeAnalysis;
using Iceshrimp.Backend.Core.Configuration; using Iceshrimp.Backend.Core.Configuration;
using Iceshrimp.Backend.Core.Database; using Iceshrimp.Backend.Core.Database;
@ -6,11 +8,15 @@ using Iceshrimp.Backend.Core.Extensions;
using Iceshrimp.Backend.Core.Helpers; using Iceshrimp.Backend.Core.Helpers;
using Iceshrimp.Backend.Core.Middleware; using Iceshrimp.Backend.Core.Middleware;
using Iceshrimp.Backend.Core.Queues; using Iceshrimp.Backend.Core.Queues;
using Iceshrimp.Backend.Core.Services.ImageProcessing;
using Microsoft.EntityFrameworkCore; using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Options; using Microsoft.Extensions.Options;
using static Iceshrimp.Backend.Core.Services.ImageProcessing.ImageVersion;
namespace Iceshrimp.Backend.Core.Services; namespace Iceshrimp.Backend.Core.Services;
using ImageVerTriple = (ImageVersion format, string accessKey, string url);
public class DriveService( public class DriveService(
DatabaseContext db, DatabaseContext db,
ObjectStorageService storageSvc, ObjectStorageService storageSvc,
@ -163,7 +169,7 @@ public class DriveService(
DriveFile? file; DriveFile? file;
request.Filename = request.Filename.Trim('"'); request.Filename = request.Filename.Trim('"');
if (input == Stream.Null || user.IsRemoteUser && input.Length > storageConfig.Value.MaxCacheSizeBytes) if (input == Stream.Null || (user.IsRemoteUser && input.Length > storageConfig.Value.MaxCacheSizeBytes))
{ {
file = new DriveFile file = new DriveFile
{ {
@ -190,9 +196,11 @@ public class DriveService(
return file; return file;
} }
await using var data = new BufferedStream(input); var buf = new byte[input.Length];
using (var memoryStream = new MemoryStream(buf))
await input.CopyToAsync(memoryStream);
var digest = await DigestHelpers.Sha256DigestAsync(data); var digest = await DigestHelpers.Sha256DigestAsync(buf);
logger.LogDebug("Storing file {digest} for user {userId}", digest, user.Id); logger.LogDebug("Storing file {digest} for user {userId}", digest, user.Id);
file = await db.DriveFiles.FirstOrDefaultAsync(p => p.Sha256 == digest && (!p.IsLink || p.UserId == user.Id)); file = await db.DriveFiles.FirstOrDefaultAsync(p => p.Sha256 == digest && (!p.IsLink || p.UserId == user.Id));
if (file != null) if (file != null)
@ -214,13 +222,11 @@ public class DriveService(
return clonedFile; return clonedFile;
} }
data.Seek(0, SeekOrigin.Begin);
var storedInternal = storageConfig.Value.Provider == Enums.FileStorage.Local; var storedInternal = storageConfig.Value.Provider == Enums.FileStorage.Local;
var shouldCache = var shouldCache =
storageConfig.Value is { MediaRetentionTimeSpan: not null, MediaProcessing.LocalOnly: false } && storageConfig.Value is { MediaRetentionTimeSpan: not null, MediaProcessing.LocalOnly: false } &&
data.Length <= storageConfig.Value.MaxCacheSizeBytes; buf.Length <= storageConfig.Value.MaxCacheSizeBytes;
var shouldStore = user.IsLocalUser || shouldCache; var shouldStore = user.IsLocalUser || shouldCache;
@ -231,138 +237,64 @@ public class DriveService(
var properties = new DriveFile.FileProperties(); var properties = new DriveFile.FileProperties();
string url; ImageVerTriple? original = null;
string? thumbnailUrl = null; ImageVerTriple? thumbnail = null;
string? webpublicUrl = null; ImageVerTriple? @public = null;
var isReasonableSize = data.Length < storageConfig.Value.MediaProcessing.MaxFileSizeBytes; var isReasonableSize = buf.Length < storageConfig.Value.MediaProcessing.MaxFileSizeBytes;
var isImage = request.MimeType.StartsWith("image/") || request.MimeType == "image"; var isImage = request.MimeType.StartsWith("image/") || request.MimeType == "image";
var filename = GenerateFilenameKeepingExtension(request.Filename);
string? thumbnailKey = null;
string? webpublicKey = null;
if (shouldStore) if (shouldStore)
{ {
if (isImage && isReasonableSize) if (isImage && isReasonableSize)
{ {
var genThumb = !skipImageProcessing; var ident = imageProcessor.IdentifyImage(buf, request);
var genWebp = user.IsLocalUser && !skipImageProcessing; if (ident == null)
var res = await imageProcessor.ProcessImage(data, request, genThumb, genWebp);
properties = res?.Properties ?? properties;
blurhash = res?.Blurhash;
thumbnailKey = res?.RenderThumbnail != null ? GenerateWebpKey("thumbnail-") : null;
webpublicKey = res?.RenderWebpublic != null ? GenerateWebpKey("webpublic-") : null;
var webpFilename = request.Filename.EndsWith(".webp") ? request.Filename : $"{request.Filename}.webp";
if (storedInternal)
{ {
var pathBase = storageConfig.Value.Local?.Path ?? logger.LogWarning("imageProcessor.IdentifyImage() returned null, skipping image processing");
throw new Exception("Local storage path cannot be null"); original = await StoreOriginalFileOnly(input, request);
var path = Path.Combine(pathBase, filename);
data.Seek(0, SeekOrigin.Begin);
await using var writer = File.OpenWrite(path);
await data.CopyToAsync(writer);
url = $"https://{instanceConfig.Value.WebDomain}/files/{filename}";
if (thumbnailKey != null && res?.RenderThumbnail != null)
{
var thumbPath = Path.Combine(pathBase, thumbnailKey);
await using var thumbWriter = File.OpenWrite(thumbPath);
try
{
await res.RenderThumbnail(thumbWriter);
thumbnailUrl = $"https://{instanceConfig.Value.WebDomain}/files/{thumbnailKey}";
} }
catch (Exception e) else
{ {
logger.LogDebug("Failed to generate/write thumbnail: {e}", e.Message); if (ident.IsAnimated)
thumbnailKey = null; {
} logger.LogDebug("Image is animated, bypassing image processing...");
skipImageProcessing = true;
} }
if (webpublicKey != null && res?.RenderWebpublic != null) var formats = GetFormats(user, ident, request, skipImageProcessing);
var res = imageProcessor.ProcessImage(buf, ident, request, formats);
properties = res;
blurhash = res.Blurhash;
var processed = await res.RequestedFormats
.Select(p => ProcessAndStoreFileVersion(p.Key, p.Value, request.Filename))
.AwaitAllNoConcurrencyAsync()
.ContinueWithResult(p => p.ToImmutableArray());
original = processed.FirstOrDefault(p => p?.format.Key == KeyEnum.Original) ??
throw new Exception("Image processing didn't result in an original version");
thumbnail = processed.FirstOrDefault(p => p?.format.Key == KeyEnum.Thumbnail);
@public = processed.FirstOrDefault(p => p?.format.Key == KeyEnum.Public);
if (@public == null && user.IsLocalUser && !skipImageProcessing)
{ {
var webpPath = Path.Combine(pathBase, webpublicKey); var publicLocalFormat = storageConfig.Value.MediaProcessing.ImagePipeline.Public.Local.Format;
await using var webpWriter = File.OpenWrite(webpPath); if (publicLocalFormat is not ImageFormatEnum.Keep and not ImageFormatEnum.None)
try throw new Exception("Failed to re-encode image, bailing due to risk of metadata leakage");
{
await res.RenderWebpublic(webpWriter);
webpublicUrl = $"https://{instanceConfig.Value.WebDomain}/files/{webpublicKey}";
}
catch (Exception e)
{
logger.LogDebug("Failed to generate/write webp: {e}", e.Message);
webpublicKey = null;
} }
} }
} }
else else
{ {
data.Seek(0, SeekOrigin.Begin); original = await StoreOriginalFileOnly(input, request);
await storageSvc.UploadFileAsync(filename, request.MimeType, request.Filename, data);
url = storageSvc.GetFilePublicUrl(filename).AbsoluteUri;
if (thumbnailKey != null && res?.RenderThumbnail != null)
{
try
{
await using var stream = new MemoryStream();
await res.RenderThumbnail(stream);
stream.Seek(0, SeekOrigin.Begin);
await storageSvc.UploadFileAsync(thumbnailKey, "image/webp", webpFilename, stream);
thumbnailUrl = storageSvc.GetFilePublicUrl(thumbnailKey).AbsoluteUri;
}
catch (Exception e)
{
logger.LogDebug("Failed to generate/write thumbnail: {e}", e.Message);
thumbnailKey = null;
}
}
if (webpublicKey != null && res?.RenderWebpublic != null)
{
try
{
await using var stream = new MemoryStream();
await res.RenderWebpublic(stream);
stream.Seek(0, SeekOrigin.Begin);
await storageSvc.UploadFileAsync(webpublicKey, "image/webp", webpFilename, stream);
webpublicUrl = storageSvc.GetFilePublicUrl(webpublicKey).AbsoluteUri;
}
catch (Exception e)
{
logger.LogDebug("Failed to generate/write webp: {e}", e.Message);
webpublicKey = null;
}
}
} }
} }
else else
{ {
if (storedInternal) if (request.Uri == null)
{ throw new Exception("Uri must not be null at this stage");
var pathBase = storageConfig.Value.Local?.Path ??
throw new Exception("Local storage path cannot be null");
var path = Path.Combine(pathBase, filename);
await using var writer = File.OpenWrite(path);
await data.CopyToAsync(writer);
url = $"https://{instanceConfig.Value.WebDomain}/files/{filename}";
}
else
{
data.Seek(0, SeekOrigin.Begin);
await storageSvc.UploadFileAsync(filename, request.MimeType, request.Filename, data);
url = storageSvc.GetFilePublicUrl(filename).AbsoluteUri;
}
}
}
else
{
url = request.Uri ?? throw new Exception("Uri must not be null at this stage");
} }
file = new DriveFile file = new DriveFile
@ -372,14 +304,14 @@ public class DriveService(
User = user, User = user,
UserHost = user.Host, UserHost = user.Host,
Sha256 = digest, Sha256 = digest,
Size = (int)data.Length, Size = buf.Length,
IsLink = !shouldStore, IsLink = !shouldStore,
AccessKey = filename, AccessKey = original?.accessKey,
IsSensitive = request.IsSensitive, IsSensitive = request.IsSensitive,
StoredInternal = storedInternal, StoredInternal = storedInternal,
Src = request.Source, Src = request.Source,
Uri = request.Uri, Uri = request.Uri,
Url = url, Url = original?.url ?? request.Uri ?? throw new Exception("Uri must not be null here"),
Name = request.Filename, Name = request.Filename,
Comment = request.Comment, Comment = request.Comment,
Type = CleanMimeType(request.MimeType), Type = CleanMimeType(request.MimeType),
@ -387,11 +319,12 @@ public class DriveService(
RequestIp = request.RequestIp, RequestIp = request.RequestIp,
Blurhash = blurhash, Blurhash = blurhash,
Properties = properties, Properties = properties,
ThumbnailUrl = thumbnailUrl, ThumbnailUrl = thumbnail?.url,
ThumbnailAccessKey = thumbnailKey, ThumbnailAccessKey = thumbnail?.accessKey,
WebpublicType = webpublicUrl != null ? "image/webp" : null, ThumbnailMimeType = thumbnail?.format.Format.MimeType,
WebpublicUrl = webpublicUrl, PublicUrl = @public?.url,
WebpublicAccessKey = webpublicKey PublicAccessKey = @public?.accessKey,
PublicMimeType = @public?.format.Format.MimeType
}; };
await db.AddAsync(file); await db.AddAsync(file);
@ -400,6 +333,80 @@ public class DriveService(
return file; return file;
} }
private async Task<ImageVerTriple?> StoreOriginalFileOnly(
Stream input, DriveFileCreationRequest request
)
{
var accessKey = GenerateAccessKey(extension: Path.GetExtension(request.Filename));
var url = await StoreFileVersion(input, accessKey, request.Filename, request.MimeType);
return (Stub, accessKey, url);
}
private async Task<ImageVerTriple?> ProcessAndStoreFileVersion(
ImageVersion version, Func<Stream>? encode, string fileName
)
{
if (encode == null) return null;
var accessKey = GenerateAccessKey(version.Key.ToString().ToLowerInvariant(), version.Format.Extension);
Stream? stream = null;
try
{
try
{
var sw = Stopwatch.StartNew();
stream = encode();
sw.Stop();
logger.LogDebug("Encoding {version} image took {ms} ms",
version.Key.ToString().ToLowerInvariant(), sw.ElapsedMilliseconds);
}
catch (Exception e)
{
logger.LogWarning("Failed to process {ext} file version: {e}", version.Format.Extension, e.Message);
return null;
}
fileName = GenerateDerivedFileName(fileName, version.Format.Extension);
var url = await StoreFileVersion(stream, accessKey, fileName, version.Format.MimeType);
return (version, accessKey, url);
}
finally
{
if (stream != null)
await stream.DisposeAsync();
}
}
private Task<string> StoreFileVersion(Stream stream, string accessKey, string fileName, string mimeType)
{
return storageConfig.Value.Provider switch
{
Enums.FileStorage.Local => StoreFileVersionLocalStorage(stream, accessKey),
Enums.FileStorage.ObjectStorage => StoreFileVersionObjectStorage(stream, accessKey, fileName, mimeType),
_ => throw new ArgumentOutOfRangeException()
};
}
private async Task<string> StoreFileVersionLocalStorage(Stream stream, string filename)
{
var pathBase = storageConfig.Value.Local?.Path ??
throw new Exception("Local storage path cannot be null");
var path = Path.Combine(pathBase, filename);
await using var writer = File.OpenWrite(path);
stream.Seek(0, SeekOrigin.Begin);
await stream.CopyToAsync(writer);
return $"https://{instanceConfig.Value.WebDomain}/files/{filename}";
}
private async Task<string> StoreFileVersionObjectStorage(
Stream stream, string accessKey, string filename, string mimeType
)
{
stream.Seek(0, SeekOrigin.Begin);
await storageSvc.UploadFileAsync(accessKey, mimeType, filename, stream);
return storageSvc.GetFilePublicUrl(accessKey).AbsoluteUri;
}
public async Task RemoveFile(DriveFile file) public async Task RemoveFile(DriveFile file)
{ {
await RemoveFile(file.Id); await RemoveFile(file.Id);
@ -411,17 +418,19 @@ public class DriveService(
await queueSvc.BackgroundTaskQueue.EnqueueAsync(job); await queueSvc.BackgroundTaskQueue.EnqueueAsync(job);
} }
private static string GenerateFilenameKeepingExtension(string filename) private static string GenerateDerivedFileName(string filename, string newExt)
{ {
var guid = Guid.NewGuid().ToStringLower(); return filename.EndsWith($".{newExt}") ? filename : $"{filename}.{newExt}";
var ext = Path.GetExtension(filename);
return guid + ext;
} }
private static string GenerateWebpKey(string prefix = "") private static string GenerateAccessKey(string prefix = "", string extension = "webp")
{ {
var guid = Guid.NewGuid().ToStringLower(); var guid = Guid.NewGuid().ToStringLower();
return $"{prefix}{guid}.webp"; // @formatter:off
return prefix.Length > 0
? extension.Length > 0 ? $"{prefix}-{guid}.{extension}" : $"{prefix}-{guid}"
: extension.Length > 0 ? $"{guid}.{extension}" : guid;
// @formatter:on
} }
private static string CleanMimeType(string? mimeType) private static string CleanMimeType(string? mimeType)
@ -431,6 +440,29 @@ public class DriveService(
: mimeType; : mimeType;
} }
private static List<ImageVersion> GetFormats(
User user, IImageInfo ident, DriveFileCreationRequest request, bool skipImageProcessing
)
{
//TODO: make this configurable
var origFormat = new ImageFormat.Keep(Path.GetExtension(request.Filename), request.MimeType);
var orig = new ImageVersion(KeyEnum.Original, origFormat);
List<ImageVersion> res = [orig];
if (skipImageProcessing) return res;
res.Add(new ImageVersion(KeyEnum.Thumbnail, new ImageFormat.Webp(75, 1000)));
if (user.IsLocalUser)
{
var q = ident.MimeType is "image/png" ? 100 : 75;
res.Add(new ImageVersion(KeyEnum.Public, new ImageFormat.Webp(q, 2048)));
}
return res;
}
/// <summary> /// <summary>
/// We can't trust the Content-Length header, and it might be null. /// We can't trust the Content-Length header, and it might be null.
/// This makes sure that we only ever read up to maxLength into memory. /// This makes sure that we only ever read up to maxLength into memory.
@ -475,7 +507,6 @@ public class DriveFileCreationRequest
public string? Uri; public string? Uri;
} }
//TODO: set uri as well (which may be different)
file static class DriveFileExtensions file static class DriveFileExtensions
{ {
public static DriveFile Clone(this DriveFile file, User user, DriveFileCreationRequest request) public static DriveFile Clone(this DriveFile file, User user, DriveFileCreationRequest request)
@ -501,9 +532,9 @@ file static class DriveFileExtensions
AccessKey = file.AccessKey, AccessKey = file.AccessKey,
ThumbnailUrl = file.ThumbnailUrl, ThumbnailUrl = file.ThumbnailUrl,
IsSensitive = request.IsSensitive, IsSensitive = request.IsSensitive,
WebpublicType = file.WebpublicType, PublicMimeType = file.PublicMimeType,
WebpublicUrl = file.WebpublicUrl, PublicUrl = file.PublicUrl,
WebpublicAccessKey = file.WebpublicAccessKey, PublicAccessKey = file.PublicAccessKey,
StoredInternal = file.StoredInternal, StoredInternal = file.StoredInternal,
UserHost = user.Host, UserHost = user.Host,
Comment = request.Comment, Comment = request.Comment,


@ -43,7 +43,7 @@ public partial class EmojiService(
MimeType = mimeType, MimeType = mimeType,
IsSensitive = false IsSensitive = false
}; };
var driveFile = await driveSvc.StoreFile(input, user, request, skipImageProcessing: true); var driveFile = await driveSvc.StoreFile(input, user, request, true);
var id = IdHelpers.GenerateSlowflakeId(); var id = IdHelpers.GenerateSlowflakeId();
var emoji = new Emoji var emoji = new Emoji
@ -54,7 +54,7 @@ public partial class EmojiService(
Category = category, Category = category,
UpdatedAt = DateTime.UtcNow, UpdatedAt = DateTime.UtcNow,
OriginalUrl = driveFile.Url, OriginalUrl = driveFile.Url,
PublicUrl = driveFile.PublicUrl, PublicUrl = driveFile.AccessUrl,
Width = driveFile.Properties.Width, Width = driveFile.Properties.Width,
Height = driveFile.Properties.Height Height = driveFile.Properties.Height
}; };
@ -69,7 +69,7 @@ public partial class EmojiService(
public async Task<Emoji> CloneEmoji(Emoji existing) public async Task<Emoji> CloneEmoji(Emoji existing)
{ {
var user = await sysUserSvc.GetInstanceActorAsync(); var user = await sysUserSvc.GetInstanceActorAsync();
var driveFile = await driveSvc.StoreFile(existing.OriginalUrl, user, sensitive: false, forceStore: true, var driveFile = await driveSvc.StoreFile(existing.OriginalUrl, user, false, forceStore: true,
skipImageProcessing: false) ?? skipImageProcessing: false) ??
throw new Exception("Error storing emoji file"); throw new Exception("Error storing emoji file");
@ -79,7 +79,7 @@ public partial class EmojiService(
Name = existing.Name, Name = existing.Name,
UpdatedAt = DateTime.UtcNow, UpdatedAt = DateTime.UtcNow,
OriginalUrl = driveFile.Url, OriginalUrl = driveFile.Url,
PublicUrl = driveFile.PublicUrl, PublicUrl = driveFile.AccessUrl,
Width = driveFile.Properties.Width, Width = driveFile.Properties.Width,
Height = driveFile.Properties.Height Height = driveFile.Properties.Height
}; };


@ -0,0 +1,9 @@
namespace Iceshrimp.Backend.Core.Services.ImageProcessing;
public interface IImageInfo
{
public int Width { get; }
public int Height { get; }
public bool IsAnimated { get; }
public string? MimeType { get; }
}


@ -0,0 +1,24 @@
namespace Iceshrimp.Backend.Core.Services.ImageProcessing;
public interface IImageProcessorBase
{
public string DisplayName { get; }
public int Priority { get; }
}
public interface IImageProcessor : IImageProcessorBase
{
public bool CanIdentify { get; }
public bool CanGenerateBlurhash { get; }
public IImageInfo Identify(byte[] input);
public bool CanEncode(ImageFormat format);
public Stream Encode(byte[] input, IImageInfo ident, ImageFormat format);
public string Blurhash(byte[] input, IImageInfo ident);
}
public abstract class ImageProcessorBase(string displayName, int priority) : IImageProcessorBase
{
public string DisplayName => displayName;
public int Priority => priority;
}
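
As a purely hypothetical illustration of the extension point (not part of this commit), an additional processor would just implement IImageProcessor and get registered like the built-in ones:

// Hypothetical pass-through processor; it exists only to show how the abstraction composes.
public class NullPassthroughProcessor() : ImageProcessorBase("NullPassthrough", 100), IImageProcessor
{
	public bool CanIdentify         => false;
	public bool CanGenerateBlurhash => false;

	public IImageInfo Identify(byte[] input)                   => throw new NotSupportedException();
	public string     Blurhash(byte[] input, IImageInfo ident) => throw new NotSupportedException();

	// Only "keep as-is" requests are handled; anything else falls through to the next processor.
	public bool   CanEncode(ImageFormat format)                              => format is ImageFormat.Keep;
	public Stream Encode(byte[] input, IImageInfo ident, ImageFormat format) => new MemoryStream(input);
}

// Registration would mirror ServiceExtensions above:
// services.AddSingleton<IImageProcessor, NullPassthroughProcessor>();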


@ -0,0 +1,72 @@
using System.ComponentModel.DataAnnotations;
namespace Iceshrimp.Backend.Core.Services.ImageProcessing;
public abstract record ImageFormat(string Extension, string MimeType)
{
public record Keep(string Extension, string MimeType) : ImageFormat(Extension, MimeType);
//TODO: public record StripExifAndIcc(string Extension, string MimeType) : ImageFormat(Extension, MimeType);
public record Webp(
Webp.Compression Mode,
[Range(0, 100)] int Quality,
int TargetRes
) : ImageFormat("webp", "image/webp")
{
public enum Compression
{
Lossy,
NearLossless,
Lossless
}
}
public record Avif(
Avif.Compression Mode,
[Range(0, 100)] int Quality,
[Range(8, 12)] int? BitDepth,
int TargetRes
) : ImageFormat("avif", "image/avif")
{
public enum Compression
{
Lossy,
Lossless
}
}
public record Jxl(
Jxl.Compression Mode,
[Range(0, 100)] int Quality,
[Range(1, 9)] int Effort,
int TargetRes
) : ImageFormat("jxl", "image/jxl")
{
public enum Compression
{
Lossy,
Lossless
}
}
}
public enum ImageFormatEnum
{
None,
Keep,
Webp,
Avif,
Jxl
}
public record ImageVersion(ImageVersion.KeyEnum Key, ImageFormat Format)
{
public enum KeyEnum
{
Original,
Thumbnail,
Public
}
public static ImageVersion Stub => new(KeyEnum.Original, new ImageFormat.Keep("", ""));
}
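
Since the commit message notes that the AVIF and JXL encoders are wired up but nothing requests them yet, a hypothetical future pipeline configuration could construct versions like these (illustrative only, using the record signatures above):

// Hypothetical only: no code path in this commit constructs these versions yet.
var avifPublic  = new ImageVersion(ImageVersion.KeyEnum.Public,
                                   new ImageFormat.Avif(ImageFormat.Avif.Compression.Lossy, Quality: 60, BitDepth: 10, TargetRes: 2048));
var jxlOriginal = new ImageVersion(ImageVersion.KeyEnum.Original,
                                   new ImageFormat.Jxl(ImageFormat.Jxl.Compression.Lossless, Quality: 100, Effort: 7, TargetRes: 4096));
// VipsProcessor.CanEncode accepts both; ImageSharpProcessor currently reports false for them,
// so an ImageSharp-only configuration would simply yield no encoder for these versions.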


@ -0,0 +1,151 @@
using CommunityToolkit.HighPerformance;
using Iceshrimp.Backend.Core.Configuration;
using Iceshrimp.Backend.Core.Helpers;
using Microsoft.Extensions.Options;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.Formats;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Memory;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing;
using ImageSharpConfig = SixLabors.ImageSharp.Configuration;
namespace Iceshrimp.Backend.Core.Services.ImageProcessing;
public class ImageSharpProcessor : ImageProcessorBase, IImageProcessor
{
private readonly ILogger<ImageSharpProcessor> _logger;
private readonly ImageSharpConfig _sharpConfig;
private readonly ImageSharpConfig _sharpConfigContiguous;
public bool CanIdentify => true;
public bool CanGenerateBlurhash => true;
public ImageSharpProcessor(
ILogger<ImageSharpProcessor> logger, IOptions<Config.StorageSection> config
) : base("ImageSharp", 1)
{
_logger = logger;
_sharpConfig = ImageSharpConfig.Default.Clone();
// @formatter:off
_sharpConfig.MemoryAllocator = MemoryAllocator.Create(new MemoryAllocatorOptions
{
// 1MP / 1000000 px * 4 channels (RGBA) * 8 bits per channel / 8 bit per byte / 1024 byte per kb / 1024 kb per mb
// This works out to ~3.85MB per Mpx, so 4 leaves a bit of buffer.
AllocationLimitMegabytes = config.Value.MediaProcessing.MaxResolutionMpx * 4
});
_sharpConfigContiguous = _sharpConfig.Clone();
_sharpConfigContiguous.PreferContiguousImageBuffers = true;
// @formatter:on
}
public IImageInfo Identify(byte[] input)
{
return new ImageSharpInfo(Image.Identify(input));
}
public bool CanEncode(ImageFormat format)
{
return format switch
{
ImageFormat.Webp => true,
ImageFormat.Jxl => false,
ImageFormat.Avif => false,
_ => throw new ArgumentOutOfRangeException(nameof(format), format, null)
};
}
public Stream Encode(byte[] input, IImageInfo ident, ImageFormat format)
{
return format switch
{
ImageFormat.Webp opts => EncodeWebp(input, ident, opts),
_ => throw new ArgumentOutOfRangeException(nameof(format))
};
}
private Stream EncodeWebp(byte[] data, IImageInfo ident, ImageFormat.Webp opts)
{
using var image = GetImage<Rgba32>(data, ident, opts.TargetRes);
var thumbEncoder = new WebpEncoder
{
Quality = opts.Quality,
FileFormat = opts.Mode == ImageFormat.Webp.Compression.Lossless
? WebpFileFormatType.Lossless
: WebpFileFormatType.Lossy,
NearLossless = opts.Mode == ImageFormat.Webp.Compression.NearLossless
};
var stream = new MemoryStream();
image.SaveAsWebp(stream, thumbEncoder);
return stream;
}
public string Blurhash(byte[] data, IImageInfo ident)
{
using var image = GetImage<Rgb24>(data, ident, 100, preferContiguous: true);
return Blurhash(image);
}
// Since we can't work with Span<T> objects in async blocks, this needs to be done in a separate method.
private string Blurhash(Image<Rgb24> image)
{
Span<Rgb24> span;
if (image.DangerousTryGetSinglePixelMemory(out var mem))
{
span = mem.Span;
}
else
{
_logger.LogWarning("Failed to generate blurhash using ImageSharp: Memory region not contiguous. Falling back to block copy...");
span = new Rgb24[image.Width * image.Height];
image.CopyPixelDataTo(span);
}
return BlurhashHelper.Encode(span.AsSpan2D(image.Height, image.Width), 7, 7);
}
private Image<TPixel> GetImage<TPixel>(
byte[] data, IImageInfo ident, int width, int? height = null, bool preferContiguous = false
) where TPixel : unmanaged, IPixel<TPixel>
{
width = Math.Min(ident.Width, width);
height = Math.Min(ident.Height, height ?? width);
var size = new Size(width, height.Value);
var options = new DecoderOptions
{
MaxFrames = 1,
TargetSize = size,
Configuration = preferContiguous ? _sharpConfigContiguous : _sharpConfig
};
var image = Image.Load<TPixel>(options, data);
image.Mutate(x => x.AutoOrient());
image.Metadata.ExifProfile = null;
var opts = new ResizeOptions { Size = size, Mode = ResizeMode.Max };
image.Mutate(p => p.Resize(opts));
return image;
}
private class ImageSharpInfo(ImageInfo info) : IImageInfo
{
public int Width => info.Width;
public int Height => info.Height;
public bool IsAnimated => info.IsAnimated || info.FrameMetadataCollection.Count != 0;
public string? MimeType
{
get
{
if (info.Metadata.DecodedImageFormat is PngFormat && info.IsAnimated)
return "image/apng";
return info.Metadata.DecodedImageFormat?.DefaultMimeType;
}
}
public static implicit operator ImageSharpInfo(ImageInfo src) => new(src);
}
}


@ -0,0 +1,139 @@
using System.Runtime.InteropServices;
using System.Security;
using CommunityToolkit.HighPerformance;
using Iceshrimp.Backend.Core.Helpers;
using NetVips;
using SixLabors.ImageSharp.PixelFormats;
namespace Iceshrimp.Backend.Core.Services.ImageProcessing;
public class VipsProcessor : ImageProcessorBase, IImageProcessor
{
private readonly ILogger<VipsProcessor> _logger;
// Set to false until https://github.com/libvips/libvips/issues/2537 is implemented
public bool CanIdentify => false;
public bool CanGenerateBlurhash => true;
public VipsProcessor(ILogger<VipsProcessor> logger) : base("LibVips", 0)
{
_logger = logger;
//TODO: Implement something similar to https://github.com/lovell/sharp/blob/da655a1859744deec9f558effa5c9981ef5fd6d3/lib/utility.js#L153C5-L158
NetVips.NetVips.Concurrency = 1;
// We want to know when we have a memory leak
NetVips.NetVips.Leak = true;
// We don't need the VIPS operation or file cache
Cache.Max = 0;
Cache.MaxFiles = 0;
Cache.MaxMem = 0;
Log.SetLogHandler("VIPS", Enums.LogLevelFlags.Warning | Enums.LogLevelFlags.Error,
VipsLogDelegate);
}
public bool CanEncode(ImageFormat format)
{
return format switch
{
ImageFormat.Webp => true,
ImageFormat.Jxl => true,
ImageFormat.Avif => true,
_ => throw new ArgumentOutOfRangeException(nameof(format), format, null)
};
}
public Stream Encode(byte[] input, IImageInfo _, ImageFormat format)
{
return format switch
{
ImageFormat.Webp opts => EncodeWebp(input, opts),
ImageFormat.Jxl opts => EncodeJxl(input, opts),
ImageFormat.Avif opts => EncodeAvif(input, opts),
_ => throw new ArgumentOutOfRangeException(nameof(format))
};
}
public string Blurhash(byte[] buf, IImageInfo ident)
{
using var blurhashImageSource =
Image.ThumbnailBuffer(buf, 100, height: 100, size: Enums.Size.Down);
using var blurhashImage = blurhashImageSource.Interpretation == Enums.Interpretation.Srgb
? blurhashImageSource
: blurhashImageSource.Colourspace(Enums.Interpretation.Srgb);
using var blurhashImageFlattened = blurhashImage.HasAlpha() ? blurhashImage.Flatten() : blurhashImage;
using var blurhashImageActual = blurhashImageFlattened.Cast(Enums.BandFormat.Uchar);
var blurBuf = blurhashImageActual.WriteToMemory();
var blurPixels = MemoryMarshal.Cast<byte, Rgb24>(blurBuf).AsSpan2D(blurhashImage.Height, blurhashImage.Width);
return BlurhashHelper.Encode(blurPixels, 7, 7);
}
public IImageInfo Identify(byte[] input) => new VipsImageInfo(Image.NewFromBuffer(input));
private static MemoryStream EncodeWebp(byte[] buf, ImageFormat.Webp opts)
{
using var image = Thumbnail(buf, opts.TargetRes);
var stream = new MemoryStream();
image.WebpsaveStream(stream, opts.Quality, opts.Mode == ImageFormat.Webp.Compression.Lossless,
nearLossless: opts.Mode == ImageFormat.Webp.Compression.NearLossless);
return stream;
}
private static MemoryStream EncodeAvif(byte[] buf, ImageFormat.Avif opts)
{
using var image = Thumbnail(buf, opts.TargetRes);
var stream = new MemoryStream();
image.HeifsaveStream(stream, opts.Quality, lossless: opts.Mode == ImageFormat.Avif.Compression.Lossless,
bitdepth: opts.BitDepth,
compression: Enums.ForeignHeifCompression.Av1);
return stream;
}
private static MemoryStream EncodeJxl(byte[] buf, ImageFormat.Jxl opts)
{
using var image = Thumbnail(buf, opts.TargetRes);
var stream = new MemoryStream();
image.JxlsaveStream(stream, q: opts.Quality, lossless: opts.Mode == ImageFormat.Jxl.Compression.Lossless,
effort: opts.Effort);
return stream;
}
private static Image StripMetadata(Image image)
{
return image.Mutate(mutable =>
{
mutable.Autorot();
foreach (var field in mutable.GetFields())
{
if (field is "icc-profile-data") continue;
mutable.Remove(field);
}
});
}
private static Image Thumbnail(byte[] buf, int targetRes)
{
using var image = Image.ThumbnailBuffer(buf, targetRes, height: targetRes, size: Enums.Size.Down);
return StripMetadata(image);
}
private void VipsLogDelegate(string domain, Enums.LogLevelFlags _, string message) =>
_logger.LogWarning("{domain} - {message}", domain, message);
[SuppressUnmanagedCodeSecurity]
[DllImport("libvips.42", EntryPoint = "vips_image_get_n_pages", CallingConvention = CallingConvention.Cdecl)]
private static extern int GetPageCount(Image image);
private class VipsImageInfo(Image image) : IImageInfo
{
public int Width => image.Width;
public int Height => image.Height;
public bool IsAnimated => GetPageCount(image) > 1;
public string MimeType => throw new NotImplementedException(); //TODO
public static implicit operator VipsImageInfo(Image src) => new(src);
}
}


@ -1,302 +1,138 @@
using System.Runtime.InteropServices; using System.Collections.Immutable;
using CommunityToolkit.HighPerformance; using System.Diagnostics.CodeAnalysis;
using Iceshrimp.Backend.Core.Configuration; using Iceshrimp.Backend.Core.Configuration;
using Iceshrimp.Backend.Core.Database.Tables; using Iceshrimp.Backend.Core.Database.Tables;
using Iceshrimp.Backend.Core.Helpers; using Iceshrimp.Backend.Core.Services.ImageProcessing;
using Microsoft.Extensions.Options; using Microsoft.Extensions.Options;
using SixLabors.ImageSharp; using static Iceshrimp.Backend.Core.Services.ImageProcessing.ImageVersion;
using SixLabors.ImageSharp.Formats;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Memory;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing;
using ImageSharp = SixLabors.ImageSharp.Image;
namespace Iceshrimp.Backend.Core.Services; namespace Iceshrimp.Backend.Core.Services;
public class ImageProcessor public class ImageProcessor
{ {
private readonly ILogger<ImageProcessor> _logger;
private readonly IOptionsMonitor<Config.StorageSection> _config; private readonly IOptionsMonitor<Config.StorageSection> _config;
//TODO: support stripping of exif/icc metadata (without re-encoding)
public ImageProcessor(ILogger<ImageProcessor> logger, IOptionsMonitor<Config.StorageSection> config) private readonly List<IImageProcessor> _imageProcessors;
private readonly ILogger<ImageProcessor> _logger;
public ImageProcessor(
ILogger<ImageProcessor> logger, IOptionsMonitor<Config.StorageSection> config,
IEnumerable<IImageProcessor> imageProcessors
)
{ {
_logger = logger; _logger = logger;
_config = config; _config = config;
_imageProcessors = imageProcessors.OrderBy(p => p.Priority).ToList();
if (config.CurrentValue.MediaProcessing.ImageProcessor == Enums.ImageProcessor.None) // @formatter:off
{ if (_imageProcessors.Count == 0)
_logger.LogInformation("Image processing is disabled as per the configuration."); _logger.LogInformation("Image processing is disabled as per the configuration.");
return; else if (_imageProcessors.Count == 1)
_logger.LogInformation("Using {processor} for image processing.", _imageProcessors[0].DisplayName);
else
_logger.LogInformation("Using [{processors}] for image processing.", string.Join(", ", _imageProcessors.Select(p => p.DisplayName)));
// @formatter:on
} }
SixLabors.ImageSharp.Configuration.Default.MemoryAllocator = MemoryAllocator.Create(new MemoryAllocatorOptions public IImageInfo? IdentifyImage(byte[] buf, DriveFileCreationRequest request)
{ {
// 1MP / 1000000 px * 4 channels (RGBA) * 8 bits per channel / 8 bit per byte / 1024 byte per kb / 1024 kb per mb // @formatter:off
// This works out to ~3.85MB per Mpx, so 4 leaves a bit of buffer. var ident = RunProcessorAction("ident", p => p.Identify(buf), p => p.CanIdentify,
AllocationLimitMegabytes = config.CurrentValue.MediaProcessing.MaxResolutionMpx * 4 () => throw new Exception("No available image processor supports identifying images"));
}); // @formatter:on
#if EnableLibVips // Correct MIME type
if (_config.CurrentValue.MediaProcessing.ImageProcessor != Enums.ImageProcessor.LibVips) if ((request.MimeType == "image" && ident?.MimeType != null) || ident?.MimeType == "image/apng")
{ request.MimeType = ident.MimeType;
_logger.LogDebug("VIPS support was enabled at compile time, but is not enabled in the configuration, skipping VIPS init");
_logger.LogInformation("Using ImageSharp for image processing."); return ident;
return;
} }
//TODO: Implement something similar to https://github.com/lovell/sharp/blob/da655a1859744deec9f558effa5c9981ef5fd6d3/lib/utility.js#L153C5-L158 public ProcessedImage ProcessImage(
NetVips.NetVips.Concurrency = 1; byte[] buf, IImageInfo ident, DriveFileCreationRequest request, IReadOnlyCollection<ImageVersion> formats
)
// We want to know when we have a memory leak
NetVips.NetVips.Leak = true;
// We don't need the VIPS operation or file cache
NetVips.Cache.Max = 0;
NetVips.Cache.MaxFiles = 0;
NetVips.Cache.MaxMem = 0;
NetVips.Log.SetLogHandler("VIPS", NetVips.Enums.LogLevelFlags.Warning | NetVips.Enums.LogLevelFlags.Error,
VipsLogDelegate);
_logger.LogInformation("Using VIPS for image processing.");
#else
if (config.CurrentValue.MediaProcessing.ImageProcessor == Enums.ImageProcessor.LibVips)
{ {
_logger.LogWarning("VIPS support was disabled at compile time, but ImageProcessor is set to LibVips in the configuration. Either compile with -p:EnableLibVips=true, or set the ImageProcessor configuration option to something else."); if (_config.CurrentValue.MediaProcessing.ImageProcessor == Enums.ImageProcessor.None || formats.Count == 0)
return new ProcessedImage(ident, new MemoryStream(buf), request);
// @formatter:off
var blurhash = RunProcessorAction("blurhash", p => p.Blurhash(buf, ident), p => p.CanGenerateBlurhash,
() => _logger.LogWarning("Skipping blurhash generation: No available image processor supports generating blurhashes"),
(p, e) => _logger.LogWarning("Failed to generate blurhash using {processor}: {e}", p, e));
// @formatter:on
var results = formats
.ToDictionary<ImageVersion, ImageVersion, Func<Stream>?>(p => p, ProcessImageFormat)
.AsReadOnly();
return new ProcessedImage(ident) { RequestedFormats = results, Blurhash = blurhash };
Func<Stream>? ProcessImageFormat(ImageVersion p)
{
if (p.Format is ImageFormat.Keep) return () => new MemoryStream(buf);
var proc = _imageProcessors.FirstOrDefault(i => i.CanEncode(p.Format));
if (proc == null) return null;
return () => proc.Encode(buf, ident, p.Format);
}
}
private T? RunProcessorAction<T>(
string name, Func<IImageProcessor, T> action, Func<IImageProcessor, bool> locator,
Action fallback, Action<IImageProcessor, Exception>? fallthrough = null
) where T : class
{
var processors = _imageProcessors.Where(locator).ToImmutableArray();
if (processors.Length == 0)
{
fallback();
return null;
}
foreach (var processor in processors)
{
try
{
return action(processor);
}
catch (Exception e)
{
if (fallthrough != null)
{
fallthrough(processor, e);
} }
else else
{ {
_logger.LogDebug("VIPS support was disabled at compile time, skipping VIPS init"); _logger.LogWarning("Processor {name} failed to run {action}, falling through...",
processor.DisplayName, name);
}
}
} }
_logger.LogInformation("Using ImageSharp for image processing."); _logger.LogWarning("All processors failed to run {action}, returning null.", name);
#endif return null;
} }
public class Result public class ProcessedImage : DriveFile.FileProperties
{ {
public string? Blurhash; public string? Blurhash;
public required DriveFile.FileProperties Properties; public required IReadOnlyDictionary<ImageVersion, Func<Stream>?> RequestedFormats;
public Func<Stream, Task>? RenderThumbnail;
public Func<Stream, Task>? RenderWebpublic; public ProcessedImage(IImageInfo info)
{
Width = info.Width;
Height = info.Height;
} }
public async Task<Result?> ProcessImage(Stream data, DriveFileCreationRequest request, bool genThumb, bool genWebp) [SetsRequiredMembers]
public ProcessedImage(IImageInfo info, Stream original, DriveFileCreationRequest request) : this(info)
{ {
try var format = new ImageFormat.Keep(Path.GetExtension(request.Filename), request.MimeType);
RequestedFormats = new Dictionary<ImageVersion, Func<Stream>?>
{ {
var pre = DateTime.Now; { new ImageVersion(KeyEnum.Original, format), () => original }
var ident = await ImageSharp.IdentifyAsync(data);
data.Seek(0, SeekOrigin.Begin);
Result? res = null;
// Correct mime type
if (request.MimeType == "image" && ident.Metadata.DecodedImageFormat?.DefaultMimeType != null)
request.MimeType = ident.Metadata.DecodedImageFormat.DefaultMimeType;
if (ident.Metadata.DecodedImageFormat is PngFormat && ident.IsAnimated)
request.MimeType = "image/apng";
if (_config.CurrentValue.MediaProcessing.ImageProcessor == Enums.ImageProcessor.None)
{
var props = new DriveFile.FileProperties { Width = ident.Size.Width, Height = ident.Size.Height };
return new Result { Properties = props };
}
// Don't generate thumb/webp for animated images
if (ident.FrameMetadataCollection.Count != 0 || ident.IsAnimated)
{
genThumb = false;
genWebp = false;
}
if (ident.Width * ident.Height > _config.CurrentValue.MediaProcessing.MaxResolutionMpx * 1000 * 1000)
{
_logger.LogDebug("Image is larger than {mpx}mpx ({width}x{height}), bypassing image processing pipeline",
_config.CurrentValue.MediaProcessing.MaxResolutionMpx, ident.Width, ident.Height);
var props = new DriveFile.FileProperties { Width = ident.Size.Width, Height = ident.Size.Height };
return new Result { Properties = props };
}
#if EnableLibVips
if (_config.CurrentValue.MediaProcessing.ImageProcessor == Enums.ImageProcessor.LibVips)
{
try
{
byte[] buf;
await using (var memoryStream = new MemoryStream())
{
await data.CopyToAsync(memoryStream);
buf = memoryStream.ToArray();
}
res = await ProcessImageVips(buf, ident, request, genThumb, genWebp);
}
catch (Exception e)
{
_logger.LogWarning("Failed to process image of type {type} with VIPS, falling back to ImageSharp: {e}",
request.MimeType, e.Message);
}
}
#endif
try
{
res ??= await ProcessImageSharp(data, ident, request, genThumb, genWebp);
}
catch (Exception e)
{
_logger.LogWarning("Failed to process image of type {type} with ImageSharp: {e}",
request.MimeType, e.Message);
var props = new DriveFile.FileProperties { Width = ident.Size.Width, Height = ident.Size.Height };
return new Result { Properties = props };
}
_logger.LogTrace("Image processing took {ms} ms", (int)(DateTime.Now - pre).TotalMilliseconds);
return res;
}
catch (Exception e)
{
_logger.LogError("Failed to process image with mime type {type}: {e}",
request.MimeType, e.Message);
return null;
}
}
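For orientation, this is roughly how the old callback-based Result was meant to be consumed; the surrounding drive-service code is not part of this excerpt, so the method and variable names below are illustrative assumptions (the sketch is written as if it lived inside the same class, so ProcessImage can be called directly).
// Hypothetical caller sketch; only ProcessImage, RenderThumbnail and RenderWebpublic
// come from the code above, everything else is an assumption.
private async Task ConsumeResultExample(Stream input, DriveFileCreationRequest request)
{
    var result = await ProcessImage(input, request, genThumb: true, genWebp: true);
    if (result?.RenderThumbnail is { } renderThumbnail)
    {
        await using var thumbStream = new MemoryStream();
        await renderThumbnail(thumbStream); // lossy WebP, at most 1000px on the long edge
    }
    if (result?.RenderWebpublic is { } renderWebpublic)
    {
        await using var webpStream = new MemoryStream();
        await renderWebpublic(webpStream); // WebP rendition, at most 2048px on the long edge
    }
}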
private async Task<Result> ProcessImageSharp(
Stream data, ImageInfo ident, DriveFileCreationRequest request, bool genThumb, bool genWebp
)
{
var properties = new DriveFile.FileProperties { Width = ident.Size.Width, Height = ident.Size.Height };
var res = new Result { Properties = properties };
// Calculate blurhash from an image downscaled to at most 100px for improved performance
using (var image = await GetImage<Rgb24>(data, ident, 100, preferContiguous: true))
{
res.Blurhash = GetBlurhashImageSharp(image);
}
if (genThumb)
{
res.RenderThumbnail = async stream =>
{
using var image = await GetImage<Rgba32>(data, ident, 1000);
var thumbEncoder = new WebpEncoder { Quality = 75, FileFormat = WebpFileFormatType.Lossy };
await image.SaveAsWebpAsync(stream, thumbEncoder);
};
}
if (genWebp)
{
res.RenderWebpublic = async stream =>
{
using var image = await GetImage<Rgba32>(data, ident, 2048);
var q = request.MimeType == "image/png" ? 100 : 75;
var thumbEncoder = new WebpEncoder { Quality = q, FileFormat = WebpFileFormatType.Lossy };
await image.SaveAsWebpAsync(stream, thumbEncoder);
};
}
return res;
}
// Since we can't work with Span<T> objects in async blocks, this needs to be done in a separate method.
private string GetBlurhashImageSharp(Image<Rgb24> image)
{
Span<Rgb24> span;
if (image.DangerousTryGetSinglePixelMemory(out var mem))
{
span = mem.Span;
}
else
{
_logger.LogWarning("Failed to generate blurhash using ImageSharp: Memory region not contiguous. Falling back to block copy...");
span = new Rgb24[image.Width * image.Height];
image.CopyPixelDataTo(span);
}
return BlurhashHelper.Encode(span.AsSpan2D(image.Height, image.Width), 7, 7);
}
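The comment above is the reason for this indirection: the C# compiler (at least for the language version targeted here) rejects Span<T> locals in async methods, so the span handling has to live in a synchronous helper. A minimal illustration, not part of the commit:
// Illustration only - this variant would not compile, hence the helper above:
// private async Task<string> GetBlurhashBroken(Image<Rgb24> image)
// {
//     Span<Rgb24> span = new Rgb24[image.Width * image.Height]; // compiler error: Span<T>
//     await Task.Yield();                                       // locals are not allowed here
//     return BlurhashHelper.Encode(span.AsSpan2D(image.Height, image.Width), 7, 7);
// }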
private static async Task<Image<TPixel>> GetImage<TPixel>(
Stream data, ImageInfo ident, int width, int? height = null, bool preferContiguous = false
) where TPixel : unmanaged, IPixel<TPixel>
{
width = Math.Min(ident.Width, width);
height = Math.Min(ident.Height, height ?? width);
var size = new Size(width, height.Value);
var config = preferContiguous
? SixLabors.ImageSharp.Configuration.Default.Clone()
: SixLabors.ImageSharp.Configuration.Default;
if (preferContiguous)
config.PreferContiguousImageBuffers = true;
var options = new DecoderOptions
{
MaxFrames = 1,
TargetSize = size,
Configuration = config
};
data.Seek(0, SeekOrigin.Begin);
var image = await ImageSharp.LoadAsync<TPixel>(options, data);
image.Mutate(x => x.AutoOrient());
var opts = new ResizeOptions { Size = size, Mode = ResizeMode.Max };
image.Mutate(p => p.Resize(opts));
return image;
}
#if EnableLibVips
private static Task<Result> ProcessImageVips(
byte[] buf, ImageInfo ident, DriveFileCreationRequest request, bool genThumb, bool genWebp
)
{
var properties = new DriveFile.FileProperties { Width = ident.Size.Width, Height = ident.Size.Height };
var res = new Result { Properties = properties };
// Calculate blurhash from an image downscaled to at most 100px for improved performance
using var blurhashImageSource =
NetVips.Image.ThumbnailBuffer(buf, width: 100, height: 100, size: NetVips.Enums.Size.Down);
using var blurhashImage = blurhashImageSource.Interpretation == NetVips.Enums.Interpretation.Srgb
? blurhashImageSource
: blurhashImageSource.Colourspace(NetVips.Enums.Interpretation.Srgb);
using var blurhashImageFlattened = blurhashImage.HasAlpha() ? blurhashImage.Flatten() : blurhashImage;
using var blurhashImageActual = blurhashImageFlattened.Cast(NetVips.Enums.BandFormat.Uchar);
var blurBuf = blurhashImageActual.WriteToMemory();
var blurPixels = MemoryMarshal.Cast<byte, Rgb24>(blurBuf).AsSpan2D(blurhashImage.Height, blurhashImage.Width);
res.Blurhash = BlurhashHelper.Encode(blurPixels, 7, 7);
if (genThumb)
{
res.RenderThumbnail = stream =>
{
using var thumbnailImage =
NetVips.Image.ThumbnailBuffer(buf, width: 1000, height: 1000, size: NetVips.Enums.Size.Down);
thumbnailImage.WebpsaveStream(stream, 75, false);
return Task.CompletedTask;
};
// Generate webpublic for local users, if image is not animated
if (genWebp)
{
res.RenderWebpublic = stream =>
{
using var webpublicImage =
NetVips.Image.ThumbnailBuffer(buf, width: 2048, height: 2048,
size: NetVips.Enums.Size.Down);
webpublicImage.WebpsaveStream(stream, request.MimeType == "image/png" ? 100 : 75, false);
return Task.CompletedTask;
}; };
} }
} }
return Task.FromResult(res);
}
private void VipsLogDelegate(string domain, NetVips.Enums.LogLevelFlags _, string message) =>
_logger.LogWarning("{domain} - {message}", domain, message);
#endif
} }
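By contrast, the new ProcessedImage at the top of this file exposes a dictionary of requested versions rather than fixed thumbnail/webpublic callbacks. A minimal sketch of how a consumer might drain it; only RequestedFormats, ImageVersion and KeyEnum appear in the diff, while the helper, key-property and access-key naming below are guesses.
// Hypothetical consumer; StoreVersionAsync, accessKey and version.Key are assumptions.
private static async Task StoreAllVersionsExample(ProcessedImage processed, string accessKey)
{
    foreach (var (version, render) in processed.RequestedFormats)
    {
        if (render == null) continue;       // this version was requested but not produced
        await using var stream = render();  // lazily encodes the image for this version
        await StoreVersionAsync($"{accessKey}-{version.Key}", stream);
    }
}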

View file

@ -111,14 +111,14 @@ public class StorageMaintenanceService(
deletionQueue.Add(path); deletionQueue.Add(path);
} }
if (file.WebpublicAccessKey != null) if (file.PublicAccessKey != null)
{ {
var path = Path.Join(pathBase, file.WebpublicAccessKey); var path = Path.Join(pathBase, file.PublicAccessKey);
var stream = File.OpenRead(path); var stream = File.OpenRead(path);
var filename = file.Name.EndsWith(".webp") ? file.Name : $"{file.Name}.webp"; var filename = file.Name.EndsWith(".webp") ? file.Name : $"{file.Name}.webp";
await objectStorageSvc.UploadFileAsync(file.WebpublicAccessKey, "image/webp", filename, stream); await objectStorageSvc.UploadFileAsync(file.PublicAccessKey, "image/webp", filename, stream);
file.WebpublicUrl = objectStorageSvc.GetFilePublicUrl(file.WebpublicAccessKey).AbsoluteUri; file.PublicUrl = objectStorageSvc.GetFilePublicUrl(file.PublicAccessKey).AbsoluteUri;
deletionQueue.Add(path); deletionQueue.Add(path);
} }
@ -127,7 +127,7 @@ public class StorageMaintenanceService(
item.StoredInternal = false; item.StoredInternal = false;
item.Url = file.Url; item.Url = file.Url;
item.ThumbnailUrl = file.ThumbnailUrl; item.ThumbnailUrl = file.ThumbnailUrl;
item.WebpublicUrl = file.WebpublicUrl; item.PublicUrl = file.PublicUrl;
} }
foreach (var item in deletionQueue) pathsToDelete.Add(item); foreach (var item in deletionQueue) pathsToDelete.Add(item);

View file

@ -45,6 +45,7 @@
<PackageReference Include="Ulid" Version="1.3.3" /> <PackageReference Include="Ulid" Version="1.3.3" />
<PackageReference Include="Iceshrimp.WebPush" Version="2.0.0" /> <PackageReference Include="Iceshrimp.WebPush" Version="2.0.0" />
<PackageReference Include="Iceshrimp.AssemblyUtils" Version="1.0.0" /> <PackageReference Include="Iceshrimp.AssemblyUtils" Version="1.0.0" />
<PackageReference Include="NetVips" Version="2.4.1" />
</ItemGroup> </ItemGroup>
<!-- Transitive dependency version overrides to patch security vulnerabilities --> <!-- Transitive dependency version overrides to patch security vulnerabilities -->
@ -53,18 +54,8 @@
<PackageReference Include="System.Formats.Asn1" Version="8.0.1" /> <PackageReference Include="System.Formats.Asn1" Version="8.0.1" />
</ItemGroup> </ItemGroup>
<!-- If the build flag EnableLibVips is set, switch the ImageProcessor to LibVips (with ImageSharp as fallback) --> <!-- If the build flag BundleNativeDeps is set, reference the LibVips native dependency -->
<PropertyGroup Condition=" '$(EnableLibVips)' == 'true' "> <ItemGroup Condition=" '$(BundleNativeDeps)' == 'true' ">
<DefineConstants>$(DefineConstants);EnableLibVips</DefineConstants>
</PropertyGroup>
<!-- If the build flag EnableLibVips is set, reference the .NET bindings for LibVips -->
<ItemGroup Condition=" '$(EnableLibVips)' == 'true' ">
<PackageReference Include="NetVips" Version="2.4.1" />
</ItemGroup>
<!-- If the build flags EnableLibVips and BundleNativeDeps are set, reference the LibVips native dependency -->
<ItemGroup Condition=" '$(EnableLibVips)' == 'true' And '$(BundleNativeDeps)' == 'true' ">
<PackageReference Include="NetVips.Native" Version="8.15.2-iceshrimp" /> <PackageReference Include="NetVips.Native" Version="8.15.2-iceshrimp" />
</ItemGroup> </ItemGroup>
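With NetVips now referenced unconditionally, whether libvips is usable becomes a runtime question instead of a compile-time flag. One way to probe this is via NetVips' module initializer; the fallback behaviour sketched here is an assumption about how the service might react, not something shown in the diff.
// Sketch: detect whether the native libvips library could be loaded at startup.
if (!NetVips.ModuleInitializer.VipsInitialized)
{
    // Native library missing or failed to load - fall back to ImageSharp (assumed behaviour).
}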

View file

@ -32,7 +32,7 @@ builder.Services.AddSignalR().AddMessagePackProtocol();
builder.Services.AddResponseCompression(); builder.Services.AddResponseCompression();
builder.Services.AddRazorPages(); builder.Services.AddRazorPages();
builder.Services.AddServices(); builder.Services.AddServices(builder.Configuration);
builder.Services.ConfigureServices(builder.Configuration); builder.Services.ConfigureServices(builder.Configuration);
builder.WebHost.ConfigureKestrel(builder.Configuration); builder.WebHost.ConfigureKestrel(builder.Configuration);
builder.WebHost.UseStaticWebAssets(); builder.WebHost.UseStaticWebAssets();
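The startup change above only touches the call site; the implied AddServices overload now receives the configuration, presumably so registrations can depend on the configured image processor. A sketch of the assumed new signature - the body is not part of this excerpt:
// Assumed shape of the updated extension method; only the added configuration
// argument is visible in this diff.
public static void AddServices(this IServiceCollection services, IConfiguration configuration)
{
    // ... existing registrations, now able to consult `configuration` ...
}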

View file

@ -142,7 +142,7 @@ Path = /path/to/media/location
;; Which image processor to use. ;; Which image processor to use.
;; ;;
;; ImageSharp = .NET library, slower, lower memory footprint. No external dependencies. ;; ImageSharp = .NET library, slower, lower memory footprint. No external dependencies.
;; LibVips = Native library, faster, higher and spikier memory footprint. Requires compilation with -p:EnableLibVips=true & for libvips to be installed on the system. ;; LibVips = Native library, faster, higher and spikier memory footprint. Requires compilation with -p:BundleNativeDeps=true, or for libvips to be installed on the system.
;; None = Disables image processing, fastest, lowest memory footprint. Caution: metadata (e.g. location data) for locally originating images will *not* be stripped! ;; None = Disables image processing, fastest, lowest memory footprint. Caution: metadata (e.g. location data) for locally originating images will *not* be stripped!
;; ;;
;; Options: [ImageSharp, LibVips, None] ;; Options: [ImageSharp, LibVips, None]
@ -161,6 +161,10 @@ MaxFileSize = 10M
;; Caution: metadata (e.g. location data) for locally originating images will *not* be stripped for files larger than this ;; Caution: metadata (e.g. location data) for locally originating images will *not* be stripped for files larger than this
MaxResolutionMpx = 30 MaxResolutionMpx = 30
[Storage:MediaProcessing:Formats]
Local = Webp
Remote = Webp
[Logging:LogLevel] [Logging:LogLevel]
Default = Information Default = Information
Iceshrimp = Information Iceshrimp = Information
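For reference, the MaxResolutionMpx setting shown above maps directly onto the width-times-height check in the processing service: with the default of 30, anything above 30,000,000 pixels skips the pipeline entirely, so the original is stored as-is and its metadata is not stripped.
// Worked example of the cap, using the comparison from the service code:
// limit = MaxResolutionMpx * 1000 * 1000 = 30 * 1000 * 1000 = 30,000,000 pixels
// 6000 x 4000 = 24,000,000 -> processed normally
// 8000 x 4000 = 32,000,000 -> bypasses the pipeline, original stored unmodified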

View file

@ -42,7 +42,7 @@ public static class MockObjects
config.AddIniStream(AssemblyHelpers.GetEmbeddedResourceStream("configuration.ini")); config.AddIniStream(AssemblyHelpers.GetEmbeddedResourceStream("configuration.ini"));
var collection = new ServiceCollection(); var collection = new ServiceCollection();
collection.AddServices(); collection.AddServices(config);
collection.ConfigureServices(config); collection.ConfigureServices(config);
return collection.BuildServiceProvider(); return collection.BuildServiceProvider();