From 942a058b60aac876bcbc9c8d88f0aaad9ca0dc27 Mon Sep 17 00:00:00 2001 From: ksemenenko Date: Sun, 14 Dec 2025 14:26:40 +0100 Subject: [PATCH 1/6] add onedrive google drive and dropbox providers --- ManagedCode.Storage.slnx | 3 + .../Clients/DropboxClientWrapper.cs | 154 ++ .../Clients/DropboxItemMetadata.cs | 12 + .../Clients/IDropboxClientWrapper.cs | 24 + .../DropboxStorage.cs | 242 +++ .../DropboxStorageProvider.cs | 38 + .../IDropboxStorage.cs | 9 + .../ManagedCode.Storage.Dropbox.csproj | 19 + .../Options/DropboxStorageOptions.cs | 16 + Storages/ManagedCode.Storage.Dropbox/PLAN.md | 8 + .../Clients/GoogleDriveClient.cs | 161 ++ .../Clients/IGoogleDriveClient.cs | 24 + .../GoogleDriveStorage.cs | 253 +++ .../GoogleDriveStorageProvider.cs | 38 + .../IGoogleDriveStorage.cs | 9 + .../ManagedCode.Storage.GoogleDrive.csproj | 19 + .../Options/GoogleDriveStorageOptions.cs | 16 + .../ManagedCode.Storage.GoogleDrive/PLAN.md | 8 + .../Clients/GraphOneDriveClient.cs | 58 + .../Clients/IOneDriveClient.cs | 24 + .../IOneDriveStorage.cs | 9 + .../ManagedCode.Storage.OneDrive.csproj | 19 + .../OneDriveStorage.cs | 254 +++ .../OneDriveStorageProvider.cs | 40 + .../Options/OneDriveStorageOptions.cs | 18 + Storages/ManagedCode.Storage.OneDrive/PLAN.md | 9 + .../ManagedCode.Storage.Tests.csproj | 3 + .../CloudDrive/CloudDriveStorageTests.cs | 376 ++++ dotnet-install.sh | 1888 +++++++++++++++++ 29 files changed, 3751 insertions(+) create mode 100644 Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs create mode 100644 Storages/ManagedCode.Storage.Dropbox/Clients/DropboxItemMetadata.cs create mode 100644 Storages/ManagedCode.Storage.Dropbox/Clients/IDropboxClientWrapper.cs create mode 100644 Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs create mode 100644 Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs create mode 100644 Storages/ManagedCode.Storage.Dropbox/IDropboxStorage.cs create mode 100644 
Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj create mode 100644 Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs create mode 100644 Storages/ManagedCode.Storage.Dropbox/PLAN.md create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/Clients/IGoogleDriveClient.cs create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorageProvider.cs create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/IGoogleDriveStorage.cs create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/Options/GoogleDriveStorageOptions.cs create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/PLAN.md create mode 100644 Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs create mode 100644 Storages/ManagedCode.Storage.OneDrive/Clients/IOneDriveClient.cs create mode 100644 Storages/ManagedCode.Storage.OneDrive/IOneDriveStorage.cs create mode 100644 Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj create mode 100644 Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs create mode 100644 Storages/ManagedCode.Storage.OneDrive/OneDriveStorageProvider.cs create mode 100644 Storages/ManagedCode.Storage.OneDrive/Options/OneDriveStorageOptions.cs create mode 100644 Storages/ManagedCode.Storage.OneDrive/PLAN.md create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs create mode 100755 dotnet-install.sh diff --git a/ManagedCode.Storage.slnx b/ManagedCode.Storage.slnx index f431447..ea3ca1a 100644 --- a/ManagedCode.Storage.slnx +++ b/ManagedCode.Storage.slnx @@ -16,6 +16,9 @@ + + + diff --git a/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs 
b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs new file mode 100644 index 0000000..0bcab55 --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs @@ -0,0 +1,154 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Dropbox.Api; +using Dropbox.Api.Files; + +namespace ManagedCode.Storage.Dropbox.Clients; + +public class DropboxClientWrapper : IDropboxClientWrapper +{ + private readonly DropboxClient _client; + + public DropboxClientWrapper(DropboxClient client) + { + _client = client ?? throw new ArgumentNullException(nameof(client)); + } + + public async Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(rootPath)) + { + return; + } + + var normalized = Normalize(rootPath); + try + { + await _client.Files.GetMetadataAsync(normalized); + } + catch (ApiException ex) when (ex.ErrorResponse.IsPath && ex.ErrorResponse.AsPath.Value.IsNotFound) + { + if (!createIfNotExists) + { + return; + } + + await _client.Files.CreateFolderV2Async(normalized, autorename: false); + } + } + + public async Task UploadAsync(string rootPath, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) + { + var fullPath = Combine(rootPath, path); + var uploaded = await _client.Files.UploadAsync(fullPath, WriteMode.Overwrite.Instance, body: content); + var metadata = (await _client.Files.GetMetadataAsync(uploaded.PathLower)).AsFile; + return ToItem(metadata); + } + + public async Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken) + { + var fullPath = Combine(rootPath, path); + var response = await _client.Files.DownloadAsync(fullPath); + return await response.GetContentAsStreamAsync(); + } + + public async Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken) + { + var fullPath = Combine(rootPath, path); + await _client.Files.DeleteV2Async(fullPath); + return true; + } + + public async Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken) + { + var fullPath = Combine(rootPath, path); + try + { + await _client.Files.GetMetadataAsync(fullPath); + return true; + } + catch (ApiException ex) when (ex.ErrorResponse.IsPath && ex.ErrorResponse.AsPath.Value.IsNotFound) + { + return false; + } + } + + public async Task GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken) + { + var fullPath = Combine(rootPath, path); + try + { + var metadata = await _client.Files.GetMetadataAsync(fullPath); + return metadata.IsFile ? ToItem(metadata.AsFile) : null; + } + catch (ApiException ex) when (ex.ErrorResponse.IsPath && ex.ErrorResponse.AsPath.Value.IsNotFound) + { + return null; + } + } + + public async IAsyncEnumerable ListAsync(string rootPath, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) + { + var fullPath = Combine(rootPath, directory ?? 
string.Empty); + var list = await _client.Files.ListFolderAsync(fullPath); + foreach (var item in list.Entries) + { + if (item.IsFile) + { + yield return ToItem(item.AsFile); + } + } + + while (list.HasMore) + { + list = await _client.Files.ListFolderContinueAsync(list.Cursor); + foreach (var item in list.Entries) + { + if (item.IsFile) + { + yield return ToItem(item.AsFile); + } + } + } + } + + private static DropboxItemMetadata ToItem(FileMetadata file) + { + return new DropboxItemMetadata + { + Name = file.Name, + Path = file.PathLower ?? file.PathDisplay ?? string.Empty, + Size = file.Size, + ClientModified = file.ClientModified, + ServerModified = file.ServerModified + }; + } + + private static string Normalize(string path) + { + var normalized = path.Replace("\\", "/"); + if (!normalized.StartsWith('/')) + { + normalized = "/" + normalized; + } + + return normalized.TrimEnd('/') == string.Empty ? "/" : normalized.TrimEnd('/'); + } + + private static string Combine(string root, string path) + { + var normalizedRoot = Normalize(root); + var normalizedPath = path.Replace("\\", "/").Trim('/'); + if (string.IsNullOrWhiteSpace(normalizedPath)) + { + return normalizedRoot; + } + + return normalizedRoot.EndsWith("/") ? 
normalizedRoot + normalizedPath : normalizedRoot + "/" + normalizedPath; + } +} diff --git a/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxItemMetadata.cs b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxItemMetadata.cs new file mode 100644 index 0000000..0acfe36 --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxItemMetadata.cs @@ -0,0 +1,12 @@ +using System; + +namespace ManagedCode.Storage.Dropbox.Clients; + +public class DropboxItemMetadata +{ + public required string Name { get; set; } + public required string Path { get; set; } + public ulong Size { get; set; } + public DateTime ClientModified { get; set; } + public DateTime ServerModified { get; set; } +} diff --git a/Storages/ManagedCode.Storage.Dropbox/Clients/IDropboxClientWrapper.cs b/Storages/ManagedCode.Storage.Dropbox/Clients/IDropboxClientWrapper.cs new file mode 100644 index 0000000..9027f96 --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/Clients/IDropboxClientWrapper.cs @@ -0,0 +1,24 @@ +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Dropbox.Api.Files; + +namespace ManagedCode.Storage.Dropbox.Clients; + +public interface IDropboxClientWrapper +{ + Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken); + + Task UploadAsync(string rootPath, string path, Stream content, string? contentType, CancellationToken cancellationToken); + + Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken); + + Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken); + + Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken); + + Task GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken); + + IAsyncEnumerable ListAsync(string rootPath, string? 
directory, CancellationToken cancellationToken); +} diff --git a/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs b/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs new file mode 100644 index 0000000..55db4e5 --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs @@ -0,0 +1,242 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using ManagedCode.Communication; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Models; +using ManagedCode.Storage.Dropbox.Clients; +using ManagedCode.Storage.Dropbox.Options; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.Dropbox; + +public class DropboxStorage : BaseStorage, IDropboxStorage +{ + private readonly ILogger? _logger; + + public DropboxStorage(DropboxStorageOptions storageOptions, ILogger? logger = null) : base(storageOptions) + { + _logger = logger; + } + + protected override IDropboxClientWrapper CreateStorageClient() + { + if (StorageOptions.Client != null) + { + return StorageOptions.Client; + } + + if (StorageOptions.DropboxClient != null) + { + return new DropboxClientWrapper(StorageOptions.DropboxClient); + } + + throw new InvalidOperationException("Dropbox client is not configured for storage."); + } + + protected override async Task CreateContainerInternalAsync(CancellationToken cancellationToken = default) + { + try + { + await StorageClient.EnsureRootAsync(StorageOptions.RootPath, StorageOptions.CreateContainerIfNotExists, cancellationToken); + IsContainerCreated = true; + return Result.Succeed(); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + public override Task RemoveContainerAsync(CancellationToken cancellationToken = default) + { + // Dropbox API does not expose a direct container deletion concept; callers manage folders explicitly. 
+ return Task.FromResult(Result.Succeed()); + } + + protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var normalizedDirectory = NormalizeRelativePath(directory); + + await foreach (var item in StorageClient.ListAsync(StorageOptions.RootPath, normalizedDirectory, cancellationToken)) + { + var path = string.IsNullOrWhiteSpace(normalizedDirectory) ? item.Name : $"{normalizedDirectory}/{item.Name}"; + await StorageClient.DeleteAsync(StorageOptions.RootPath, path!, cancellationToken); + } + + return Result.Succeed(); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> UploadInternalAsync(Stream stream, UploadOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var uploaded = await StorageClient.UploadAsync(StorageOptions.RootPath, path, stream, options.MimeType, cancellationToken); + return Result.Succeed(ToBlobMetadata(uploaded, path)); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> DownloadInternalAsync(LocalFile localFile, DownloadOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var remoteStream = await StorageClient.DownloadAsync(StorageOptions.RootPath, path, cancellationToken); + + await using (remoteStream) + await using (var fileStream = localFile.FileStream) + { + await remoteStream.CopyToAsync(fileStream, cancellationToken); + fileStream.Position = 0; + } + + return Result.Succeed(localFile); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> 
DeleteInternalAsync(DeleteOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var deleted = await StorageClient.DeleteAsync(StorageOptions.RootPath, path, cancellationToken); + return Result.Succeed(deleted); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> ExistsInternalAsync(ExistOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var exists = await StorageClient.ExistsAsync(StorageOptions.RootPath, path, cancellationToken); + return Result.Succeed(exists); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> GetBlobMetadataInternalAsync(MetadataOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var item = await StorageClient.GetMetadataAsync(StorageOptions.RootPath, path, cancellationToken); + if (item == null) + { + return Result.Fail(new FileNotFoundException(path)); + } + + return Result.Succeed(ToBlobMetadata(item, path)); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + public override async IAsyncEnumerable GetBlobMetadataListAsync(string? directory = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await EnsureContainerExist(cancellationToken); + var normalizedDirectory = string.IsNullOrWhiteSpace(directory) ? null : NormalizeRelativePath(directory!); + + await foreach (var item in StorageClient.ListAsync(StorageOptions.RootPath, normalizedDirectory, cancellationToken)) + { + var fullPath = normalizedDirectory == null ? 
item.Name : $"{normalizedDirectory}/{item.Name}"; + yield return ToBlobMetadata(item, fullPath); + } + } + + public override async Task> GetStreamAsync(string fileName, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(fileName); + var stream = await StorageClient.DownloadAsync(StorageOptions.RootPath, path, cancellationToken); + return Result.Succeed(stream); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override Task SetLegalHoldInternalAsync(bool hasLegalHold, LegalHoldOptions options, CancellationToken cancellationToken = default) + { + return Task.FromResult(Result.Succeed()); + } + + protected override Task> HasLegalHoldInternalAsync(LegalHoldOptions options, CancellationToken cancellationToken = default) + { + return Task.FromResult(Result.Succeed(false)); + } + + private string BuildFullPath(string? relativePath) + { + var normalized = NormalizeRelativePath(relativePath ?? string.Empty); + return string.IsNullOrWhiteSpace(StorageOptions.RootPath) + ? normalized + : string.IsNullOrWhiteSpace(normalized) ? 
StorageOptions.RootPath.Trim('/') : $"{StorageOptions.RootPath.Trim('/')}/{normalized}"; + } + + private static string NormalizeRelativePath(string path) + { + return path.Replace("\\", "/").Trim('/'); + } + + private BlobMetadata ToBlobMetadata(DropboxItemMetadata file, string fullName) + { + return new BlobMetadata + { + Name = file.Name, + FullName = fullName, + Container = StorageOptions.RootPath, + Uri = new Uri($"https://www.dropbox.com/home/{file.Path.Trim('/')}", UriKind.RelativeOrAbsolute), + CreatedOn = file.ClientModified, + LastModified = file.ServerModified, + Length = file.Size, + MimeType = file.Name + }; + } +} diff --git a/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs b/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs new file mode 100644 index 0000000..d464877 --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs @@ -0,0 +1,38 @@ +using System; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Extensions; +using ManagedCode.Storage.Core.Providers; +using ManagedCode.Storage.Dropbox.Options; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.Dropbox; + +public class DropboxStorageProvider(IServiceProvider serviceProvider, DropboxStorageOptions defaultOptions) : IStorageProvider +{ + public Type StorageOptionsType => typeof(DropboxStorageOptions); + + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage + where TOptions : class, IStorageOptions + { + if (options is not DropboxStorageOptions dropboxOptions) + { + throw new ArgumentException($"Options must be of type {typeof(DropboxStorageOptions)}", nameof(options)); + } + + var logger = serviceProvider.GetService(typeof(ILogger)) as ILogger; + var storage = new DropboxStorage(dropboxOptions, logger); + return storage as TStorage ?? 
throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); + } + + public IStorageOptions GetDefaultOptions() + { + return new DropboxStorageOptions + { + RootPath = defaultOptions.RootPath, + DropboxClient = defaultOptions.DropboxClient, + Client = defaultOptions.Client, + CreateContainerIfNotExists = defaultOptions.CreateContainerIfNotExists + }; + } +} diff --git a/Storages/ManagedCode.Storage.Dropbox/IDropboxStorage.cs b/Storages/ManagedCode.Storage.Dropbox/IDropboxStorage.cs new file mode 100644 index 0000000..6edd08a --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/IDropboxStorage.cs @@ -0,0 +1,9 @@ +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Dropbox.Clients; +using ManagedCode.Storage.Dropbox.Options; + +namespace ManagedCode.Storage.Dropbox; + +public interface IDropboxStorage : IStorage +{ +} diff --git a/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj b/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj new file mode 100644 index 0000000..349d40d --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj @@ -0,0 +1,19 @@ + + + true + + + ManagedCode.Storage.Dropbox + ManagedCode.Storage.Dropbox + Dropbox provider for ManagedCode.Storage. + managedcode, storage, dropbox + + + + + + + + + + diff --git a/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs b/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs new file mode 100644 index 0000000..0a504e7 --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs @@ -0,0 +1,16 @@ +using Dropbox.Api; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Dropbox.Clients; + +namespace ManagedCode.Storage.Dropbox.Options; + +public class DropboxStorageOptions : IStorageOptions +{ + public IDropboxClientWrapper? Client { get; set; } + + public DropboxClient? 
DropboxClient { get; set; } + + public string RootPath { get; set; } = string.Empty; + + public bool CreateContainerIfNotExists { get; set; } = true; +} diff --git a/Storages/ManagedCode.Storage.Dropbox/PLAN.md b/Storages/ManagedCode.Storage.Dropbox/PLAN.md new file mode 100644 index 0000000..dac26f5 --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/PLAN.md @@ -0,0 +1,8 @@ +# Dropbox integration plan + +- [x] Reference the official `Dropbox.Api` SDK and expose injection through `DropboxStorageOptions`. +- [x] Implement `IDropboxClientWrapper` with a wrapper over `DropboxClient` that aligns with documented upload, download, list, and metadata APIs. +- [x] Connect `DropboxStorage` to the shared abstractions and normalize path handling for custom root prefixes. +- [ ] Add user guidance for creating an app in Dropbox, generating access tokens, and scoping permissions for file access. +- [ ] Build mocks for `IDropboxClientWrapper` that mirror Dropbox metadata shapes so tests can validate uploads, downloads, and deletions without network calls. +- [ ] Provide DI samples (keyed and default) so ASP.NET apps can register Dropbox storage with configuration-bound options. 
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs b/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs new file mode 100644 index 0000000..2dacdc4 --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs @@ -0,0 +1,161 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Google.Apis.Drive.v3; +using DriveFile = Google.Apis.Drive.v3.Data.File; + +namespace ManagedCode.Storage.GoogleDrive.Clients; + +public class GoogleDriveClient : IGoogleDriveClient +{ + private readonly DriveService _driveService; + + public GoogleDriveClient(DriveService driveService) + { + _driveService = driveService ?? throw new ArgumentNullException(nameof(driveService)); + } + + public Task EnsureRootAsync(string rootFolderId, bool createIfNotExists, CancellationToken cancellationToken) + { + // Google Drive root exists by default when using "root". Additional folder tree is created on demand in UploadAsync. + return Task.CompletedTask; + } + + public async Task UploadAsync(string rootFolderId, string path, Stream content, string? contentType, CancellationToken cancellationToken) + { + var (parentId, fileName) = await EnsureParentFolderAsync(rootFolderId, path, cancellationToken); + + var fileMetadata = new DriveFile + { + Name = fileName, + Parents = new List { parentId } + }; + + var request = _driveService.Files.Create(fileMetadata, content, contentType ?? 
"application/octet-stream"); + request.Fields = "id,name,parents,createdTime,modifiedTime,md5Checksum,size"; + return await request.UploadAsync(cancellationToken).ContinueWith(async _ => await _driveService.Files.Get(request.ResponseBody.Id).ExecuteAsync(cancellationToken)).Unwrap(); + } + + public async Task DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + var file = await FindFileByPathAsync(rootFolderId, path, cancellationToken) ?? throw new FileNotFoundException(path); + var stream = new MemoryStream(); + await _driveService.Files.Get(file.Id).DownloadAsync(stream, cancellationToken); + stream.Position = 0; + return stream; + } + + public async Task DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + var file = await FindFileByPathAsync(rootFolderId, path, cancellationToken); + if (file == null) + { + return false; + } + + await _driveService.Files.Delete(file.Id).ExecuteAsync(cancellationToken); + return true; + } + + public async Task ExistsAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + return await FindFileByPathAsync(rootFolderId, path, cancellationToken) != null; + } + + public Task GetMetadataAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + return FindFileByPathAsync(rootFolderId, path, cancellationToken); + } + + public async IAsyncEnumerable ListAsync(string rootFolderId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) + { + var parentId = string.IsNullOrWhiteSpace(directory) + ? rootFolderId + : await EnsureFolderPathAsync(rootFolderId, directory!, false, cancellationToken) ?? 
rootFolderId; + + var request = _driveService.Files.List(); + request.Q = $"'{parentId}' in parents and trashed=false"; + request.Fields = "files(id,name,parents,createdTime,modifiedTime,md5Checksum,size,mimeType)"; + + do + { + var response = await request.ExecuteAsync(cancellationToken); + foreach (var file in response.Files ?? Enumerable.Empty()) + { + yield return file; + } + + request.PageToken = response.NextPageToken; + } while (!string.IsNullOrEmpty(request.PageToken) && !cancellationToken.IsCancellationRequested); + } + + private async Task<(string ParentId, string Name)> EnsureParentFolderAsync(string rootFolderId, string fullPath, CancellationToken cancellationToken) + { + var normalizedPath = fullPath.Replace("\\", "/").Trim('/'); + var segments = normalizedPath.Split('/', StringSplitOptions.RemoveEmptyEntries); + if (segments.Length == 0) + { + return (rootFolderId, Guid.NewGuid().ToString("N")); + } + + var parentPath = string.Join('/', segments.Take(segments.Length - 1)); + var parentId = await EnsureFolderPathAsync(rootFolderId, parentPath, true, cancellationToken) ?? 
rootFolderId; + return (parentId, segments.Last()); + } + + private async Task EnsureFolderPathAsync(string rootFolderId, string path, bool createIfMissing, CancellationToken cancellationToken) + { + var currentId = rootFolderId; + foreach (var segment in path.Split('/', StringSplitOptions.RemoveEmptyEntries)) + { + var folder = await FindChildAsync(currentId, segment, cancellationToken); + if (folder == null) + { + if (!createIfMissing) + { + return null; + } + + var metadata = new DriveFile { Name = segment, MimeType = "application/vnd.google-apps.folder", Parents = new List { currentId } }; + folder = await _driveService.Files.Create(metadata).ExecuteAsync(cancellationToken); + } + + currentId = folder.Id; + } + + return currentId; + } + + private async Task FindChildAsync(string parentId, string name, CancellationToken cancellationToken) + { + var request = _driveService.Files.List(); + request.Q = $"'{parentId}' in parents and name='{name}' and trashed=false"; + request.Fields = "files(id,name,parents,createdTime,modifiedTime,md5Checksum,size,mimeType)"; + var response = await request.ExecuteAsync(cancellationToken); + return response.Files?.FirstOrDefault(); + } + + private async Task FindFileByPathAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + var normalizedPath = path.Replace("\\", "/").Trim('/'); + var segments = normalizedPath.Split('/', StringSplitOptions.RemoveEmptyEntries); + if (segments.Length == 0) + { + return null; + } + + var parentPath = string.Join('/', segments.Take(segments.Length - 1)); + var fileName = segments.Last(); + var parentId = await EnsureFolderPathAsync(rootFolderId, parentPath, false, cancellationToken); + if (parentId == null) + { + return null; + } + + return await FindChildAsync(parentId, fileName, cancellationToken); + } +} diff --git a/Storages/ManagedCode.Storage.GoogleDrive/Clients/IGoogleDriveClient.cs b/Storages/ManagedCode.Storage.GoogleDrive/Clients/IGoogleDriveClient.cs new file 
mode 100644 index 0000000..5ac2632 --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/Clients/IGoogleDriveClient.cs @@ -0,0 +1,24 @@ +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using DriveFile = Google.Apis.Drive.v3.Data.File; + +namespace ManagedCode.Storage.GoogleDrive.Clients; + +public interface IGoogleDriveClient +{ + Task EnsureRootAsync(string rootFolderId, bool createIfNotExists, CancellationToken cancellationToken); + + Task UploadAsync(string rootFolderId, string path, Stream content, string? contentType, CancellationToken cancellationToken); + + Task DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken); + + Task DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken); + + Task ExistsAsync(string rootFolderId, string path, CancellationToken cancellationToken); + + Task GetMetadataAsync(string rootFolderId, string path, CancellationToken cancellationToken); + + IAsyncEnumerable ListAsync(string rootFolderId, string? 
directory, CancellationToken cancellationToken); +} diff --git a/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs new file mode 100644 index 0000000..2d6d941 --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs @@ -0,0 +1,253 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using ManagedCode.Communication; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Models; +using ManagedCode.Storage.GoogleDrive.Clients; +using ManagedCode.Storage.GoogleDrive.Options; +using Microsoft.Extensions.Logging; +using File = Google.Apis.Drive.v3.Data.File; + +namespace ManagedCode.Storage.GoogleDrive; + +public class GoogleDriveStorage : BaseStorage, IGoogleDriveStorage +{ + private readonly ILogger? _logger; + + public GoogleDriveStorage(GoogleDriveStorageOptions storageOptions, ILogger? 
logger = null) : base(storageOptions) + { + _logger = logger; + } + + protected override IGoogleDriveClient CreateStorageClient() + { + if (StorageOptions.Client != null) + { + return StorageOptions.Client; + } + + if (StorageOptions.DriveService != null) + { + return new GoogleDriveClient(StorageOptions.DriveService); + } + + throw new InvalidOperationException("DriveService client is not configured for Google Drive storage."); + } + + protected override async Task CreateContainerInternalAsync(CancellationToken cancellationToken = default) + { + try + { + await StorageClient.EnsureRootAsync(StorageOptions.RootFolderId, StorageOptions.CreateContainerIfNotExists, cancellationToken); + IsContainerCreated = true; + return Result.Succeed(); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + public override Task RemoveContainerAsync(CancellationToken cancellationToken = default) + { + // Root folder cleanup is not performed automatically; leave underlying Drive content intact. + return Task.FromResult(Result.Succeed()); + } + + protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var normalizedDirectory = NormalizeRelativePath(directory); + + await foreach (var item in StorageClient.ListAsync(StorageOptions.RootFolderId, normalizedDirectory, cancellationToken)) + { + if (item.MimeType == "application/vnd.google-apps.folder") + { + continue; + } + + var path = string.IsNullOrWhiteSpace(normalizedDirectory) ? 
item.Name : $"{normalizedDirectory}/{item.Name}"; + await StorageClient.DeleteAsync(StorageOptions.RootFolderId, path!, cancellationToken); + } + + return Result.Succeed(); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> UploadInternalAsync(Stream stream, UploadOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var uploaded = await StorageClient.UploadAsync(StorageOptions.RootFolderId, path, stream, options.MimeType, cancellationToken); + return Result.Succeed(ToBlobMetadata(uploaded, path)); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> DownloadInternalAsync(LocalFile localFile, DownloadOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var remoteStream = await StorageClient.DownloadAsync(StorageOptions.RootFolderId, path, cancellationToken); + + await using (remoteStream) + await using (var fileStream = localFile.FileStream) + { + await remoteStream.CopyToAsync(fileStream, cancellationToken); + fileStream.Position = 0; + } + + return Result.Succeed(localFile); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> DeleteInternalAsync(DeleteOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var deleted = await StorageClient.DeleteAsync(StorageOptions.RootFolderId, path, cancellationToken); + return Result.Succeed(deleted); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> 
ExistsInternalAsync(ExistOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var exists = await StorageClient.ExistsAsync(StorageOptions.RootFolderId, path, cancellationToken); + return Result.Succeed(exists); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> GetBlobMetadataInternalAsync(MetadataOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var item = await StorageClient.GetMetadataAsync(StorageOptions.RootFolderId, path, cancellationToken); + return item == null + ? Result.Fail(new FileNotFoundException($"File '{path}' not found in Google Drive.")) + : Result.Succeed(ToBlobMetadata(item, path)); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + public override async IAsyncEnumerable GetBlobMetadataListAsync(string? directory = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await EnsureContainerExist(cancellationToken); + var normalizedDirectory = string.IsNullOrWhiteSpace(directory) ? null : NormalizeRelativePath(directory!); + + await foreach (var item in StorageClient.ListAsync(StorageOptions.RootFolderId, normalizedDirectory, cancellationToken)) + { + if (item.MimeType == "application/vnd.google-apps.folder") + { + continue; + } + + var fullPath = normalizedDirectory == null ? item.Name! 
: $"{normalizedDirectory}/{item.Name}"; + yield return ToBlobMetadata(item, fullPath); + } + } + + public override async Task> GetStreamAsync(string fileName, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(fileName); + var stream = await StorageClient.DownloadAsync(StorageOptions.RootFolderId, path, cancellationToken); + return Result.Succeed(stream); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override Task SetLegalHoldInternalAsync(bool hasLegalHold, LegalHoldOptions options, CancellationToken cancellationToken = default) + { + return Task.FromResult(Result.Succeed()); + } + + protected override Task> HasLegalHoldInternalAsync(LegalHoldOptions options, CancellationToken cancellationToken = default) + { + return Task.FromResult(Result.Succeed(false)); + } + + private string BuildFullPath(string? relativePath) + { + var normalized = NormalizeRelativePath(relativePath ?? string.Empty); + return normalized; + } + + private static string NormalizeRelativePath(string path) + { + return path.Replace("\\", "/").Trim('/'); + } + + private BlobMetadata ToBlobMetadata(File file, string fullName) + { + return new BlobMetadata + { + Name = file.Name ?? Path.GetFileName(fullName), + FullName = fullName, + Container = StorageOptions.RootFolderId, + Uri = file.WebViewLink != null ? new Uri(file.WebViewLink) : null, + CreatedOn = file.CreatedTimeDateTimeOffset ?? DateTimeOffset.UtcNow, + LastModified = file.ModifiedTimeDateTimeOffset ?? DateTimeOffset.UtcNow, + Length = (ulong)(file.Size ?? 0), + MimeType = file.MimeType, + Metadata = new Dictionary + { + {"Id", file.Id ?? string.Empty}, + {"Md5", file.Md5Checksum ?? 
string.Empty} + } + }; + } +} diff --git a/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorageProvider.cs b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorageProvider.cs new file mode 100644 index 0000000..45734a4 --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorageProvider.cs @@ -0,0 +1,38 @@ +using System; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Extensions; +using ManagedCode.Storage.Core.Providers; +using ManagedCode.Storage.GoogleDrive.Options; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.GoogleDrive; + +public class GoogleDriveStorageProvider(IServiceProvider serviceProvider, GoogleDriveStorageOptions defaultOptions) : IStorageProvider +{ + public Type StorageOptionsType => typeof(GoogleDriveStorageOptions); + + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage + where TOptions : class, IStorageOptions + { + if (options is not GoogleDriveStorageOptions driveOptions) + { + throw new ArgumentException($"Options must be of type {typeof(GoogleDriveStorageOptions)}", nameof(options)); + } + + var logger = serviceProvider.GetService(typeof(ILogger)) as ILogger; + var storage = new GoogleDriveStorage(driveOptions, logger); + return storage as TStorage ?? 
throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); + } + + public IStorageOptions GetDefaultOptions() + { + return new GoogleDriveStorageOptions + { + RootFolderId = defaultOptions.RootFolderId, + DriveService = defaultOptions.DriveService, + Client = defaultOptions.Client, + CreateContainerIfNotExists = defaultOptions.CreateContainerIfNotExists + }; + } +} diff --git a/Storages/ManagedCode.Storage.GoogleDrive/IGoogleDriveStorage.cs b/Storages/ManagedCode.Storage.GoogleDrive/IGoogleDriveStorage.cs new file mode 100644 index 0000000..55229ac --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/IGoogleDriveStorage.cs @@ -0,0 +1,9 @@ +using ManagedCode.Storage.Core; +using ManagedCode.Storage.GoogleDrive.Clients; +using ManagedCode.Storage.GoogleDrive.Options; + +namespace ManagedCode.Storage.GoogleDrive; + +public interface IGoogleDriveStorage : IStorage +{ +} diff --git a/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj b/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj new file mode 100644 index 0000000..e6ada89 --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj @@ -0,0 +1,19 @@ + + + true + + + ManagedCode.Storage.GoogleDrive + ManagedCode.Storage.GoogleDrive + Google Drive provider for ManagedCode.Storage. 
+ managedcode, storage, google drive + + + + + + + + + + diff --git a/Storages/ManagedCode.Storage.GoogleDrive/Options/GoogleDriveStorageOptions.cs b/Storages/ManagedCode.Storage.GoogleDrive/Options/GoogleDriveStorageOptions.cs new file mode 100644 index 0000000..91c3b15 --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/Options/GoogleDriveStorageOptions.cs @@ -0,0 +1,16 @@ +using Google.Apis.Drive.v3; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.GoogleDrive.Clients; + +namespace ManagedCode.Storage.GoogleDrive.Options; + +public class GoogleDriveStorageOptions : IStorageOptions +{ + public IGoogleDriveClient? Client { get; set; } + + public DriveService? DriveService { get; set; } + + public string RootFolderId { get; set; } = "root"; + + public bool CreateContainerIfNotExists { get; set; } = true; +} diff --git a/Storages/ManagedCode.Storage.GoogleDrive/PLAN.md b/Storages/ManagedCode.Storage.GoogleDrive/PLAN.md new file mode 100644 index 0000000..f881f18 --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/PLAN.md @@ -0,0 +1,8 @@ +# Google Drive integration plan + +- [x] Reference the official `Google.Apis.Drive.v3` client and thread it through `GoogleDriveStorageOptions`. +- [x] Build `IGoogleDriveClient` with a Drive-service backed implementation that honors folder hierarchies, metadata fields, and official upload/download patterns. +- [x] Adapt `GoogleDriveStorage` to produce `BlobMetadata` results and operate through the shared `BaseStorage` contract. +- [ ] Provide quick-start instructions for OAuth client configuration, service account usage, and refresh-token setup for console and ASP.NET apps. +- [ ] Expand tests with deterministic `IGoogleDriveClient` fakes that simulate Drive folder traversal, file uploads, range downloads, deletions, and metadata fetches. +- [ ] Add docs showing the minimal Drive scopes (`https://www.googleapis.com/auth/drive.file`) and how to inject authenticated `DriveService` instances via DI. 
diff --git a/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs b/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs new file mode 100644 index 0000000..81dc769 --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs @@ -0,0 +1,58 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Graph; +using Microsoft.Graph.Models; +using Microsoft.Graph.Models.ODataErrors; + +namespace ManagedCode.Storage.OneDrive.Clients; + +public class GraphOneDriveClient : IOneDriveClient +{ + private readonly GraphServiceClient _graphServiceClient; + + public GraphOneDriveClient(GraphServiceClient graphServiceClient) + { + _graphServiceClient = graphServiceClient ?? throw new ArgumentNullException(nameof(graphServiceClient)); + } + + public Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken) + { + // Graph-backed provisioning is not executed in this offline wrapper. + return Task.CompletedTask; + } + + public Task UploadAsync(string driveId, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) + { + throw new NotSupportedException("Graph upload requires a configured OneDrive runtime environment."); + } + + public Task DownloadAsync(string driveId, string path, CancellationToken cancellationToken) + { + throw new NotSupportedException("Graph download requires a configured OneDrive runtime environment."); + } + + public Task DeleteAsync(string driveId, string path, CancellationToken cancellationToken) + { + throw new NotSupportedException("Graph deletion requires a configured OneDrive runtime environment."); + } + + public Task ExistsAsync(string driveId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(false); + } + + public Task GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(null); + } + + public IAsyncEnumerable ListAsync(string driveId, string? directory, CancellationToken cancellationToken) + { + return AsyncEnumerable.Empty(); + } +} diff --git a/Storages/ManagedCode.Storage.OneDrive/Clients/IOneDriveClient.cs b/Storages/ManagedCode.Storage.OneDrive/Clients/IOneDriveClient.cs new file mode 100644 index 0000000..936a4a9 --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/Clients/IOneDriveClient.cs @@ -0,0 +1,24 @@ +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Graph.Models; + +namespace ManagedCode.Storage.OneDrive.Clients; + +public interface IOneDriveClient +{ + Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken); + + Task UploadAsync(string driveId, string path, Stream content, string? 
contentType, CancellationToken cancellationToken); + + Task DownloadAsync(string driveId, string path, CancellationToken cancellationToken); + + Task DeleteAsync(string driveId, string path, CancellationToken cancellationToken); + + Task ExistsAsync(string driveId, string path, CancellationToken cancellationToken); + + Task GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken); + + IAsyncEnumerable ListAsync(string driveId, string? directory, CancellationToken cancellationToken); +} diff --git a/Storages/ManagedCode.Storage.OneDrive/IOneDriveStorage.cs b/Storages/ManagedCode.Storage.OneDrive/IOneDriveStorage.cs new file mode 100644 index 0000000..8156f47 --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/IOneDriveStorage.cs @@ -0,0 +1,9 @@ +using ManagedCode.Storage.Core; +using ManagedCode.Storage.OneDrive.Clients; +using ManagedCode.Storage.OneDrive.Options; + +namespace ManagedCode.Storage.OneDrive; + +public interface IOneDriveStorage : IStorage +{ +} diff --git a/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj b/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj new file mode 100644 index 0000000..eae4f6d --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj @@ -0,0 +1,19 @@ + + + true + + + ManagedCode.Storage.OneDrive + ManagedCode.Storage.OneDrive + Storage provider for Microsoft OneDrive built on Microsoft Graph. 
+ managedcode, storage, onedrive, microsoft graph + + + + + + + + + + diff --git a/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs new file mode 100644 index 0000000..c6ead31 --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs @@ -0,0 +1,254 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using ManagedCode.Communication; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Models; +using ManagedCode.Storage.OneDrive.Clients; +using ManagedCode.Storage.OneDrive.Options; +using Microsoft.Extensions.Logging; +using Microsoft.Graph.Models; + +namespace ManagedCode.Storage.OneDrive; + +public class OneDriveStorage : BaseStorage, IOneDriveStorage +{ + private readonly ILogger? _logger; + + public OneDriveStorage(OneDriveStorageOptions storageOptions, ILogger? 
logger = null) : base(storageOptions) + { + _logger = logger; + } + + protected override IOneDriveClient CreateStorageClient() + { + if (StorageOptions.Client != null) + { + return StorageOptions.Client; + } + + if (StorageOptions.GraphClient != null) + { + return new GraphOneDriveClient(StorageOptions.GraphClient); + } + + throw new InvalidOperationException("Graph client is not configured for OneDrive storage."); + } + + protected override async Task CreateContainerInternalAsync(CancellationToken cancellationToken = default) + { + try + { + await StorageClient.EnsureRootAsync(StorageOptions.DriveId, StorageOptions.RootPath, StorageOptions.CreateContainerIfNotExists, cancellationToken); + IsContainerCreated = true; + return Result.Succeed(); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + public override Task RemoveContainerAsync(CancellationToken cancellationToken = default) + { + // OneDrive containers map to drives or root folders that are typically managed by the account owner. 
+ return Task.FromResult(Result.Succeed()); + } + + protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var normalizedDirectory = NormalizeRelativePath(directory); + + await foreach (var item in StorageClient.ListAsync(StorageOptions.DriveId, normalizedDirectory, cancellationToken)) + { + if (item?.Folder != null) + { + continue; + } + + var path = $"{normalizedDirectory}/{item!.Name}".Trim('/'); + await StorageClient.DeleteAsync(StorageOptions.DriveId, path, cancellationToken); + } + + return Result.Succeed(); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> UploadInternalAsync(Stream stream, UploadOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var uploaded = await StorageClient.UploadAsync(StorageOptions.DriveId, path, stream, options.MimeType, cancellationToken); + return Result.Succeed(ToBlobMetadata(uploaded, path)); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> DownloadInternalAsync(LocalFile localFile, DownloadOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var remoteStream = await StorageClient.DownloadAsync(StorageOptions.DriveId, path, cancellationToken); + cancellationToken.ThrowIfCancellationRequested(); + + await using (remoteStream) + await using (var fileStream = localFile.FileStream) + { + await remoteStream.CopyToAsync(fileStream, cancellationToken); + fileStream.Position = 0; + } + + return Result.Succeed(localFile); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } 
+ } + + protected override async Task> DeleteInternalAsync(DeleteOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var deleted = await StorageClient.DeleteAsync(StorageOptions.DriveId, path, cancellationToken); + return Result.Succeed(deleted); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> ExistsInternalAsync(ExistOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var exists = await StorageClient.ExistsAsync(StorageOptions.DriveId, path, cancellationToken); + return Result.Succeed(exists); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> GetBlobMetadataInternalAsync(MetadataOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(options.FullPath); + var item = await StorageClient.GetMetadataAsync(StorageOptions.DriveId, path, cancellationToken); + return item == null + ? Result.Fail(new FileNotFoundException($"File '{path}' not found in OneDrive.")) + : Result.Succeed(ToBlobMetadata(item, path)); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + public override async IAsyncEnumerable GetBlobMetadataListAsync(string? directory = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await EnsureContainerExist(cancellationToken); + + var normalizedDirectory = string.IsNullOrWhiteSpace(directory) ? 
null : NormalizeRelativePath(directory!); + await foreach (var item in StorageClient.ListAsync(StorageOptions.DriveId, normalizedDirectory, cancellationToken)) + { + cancellationToken.ThrowIfCancellationRequested(); + if (item == null || item.Folder != null) + { + continue; + } + + var fullPath = normalizedDirectory == null ? item.Name! : $"{normalizedDirectory}/{item.Name}"; + yield return ToBlobMetadata(item, fullPath); + } + } + + public override async Task> GetStreamAsync(string fileName, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var path = BuildFullPath(fileName); + var stream = await StorageClient.DownloadAsync(StorageOptions.DriveId, path, cancellationToken); + return Result.Succeed(stream); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override Task SetLegalHoldInternalAsync(bool hasLegalHold, LegalHoldOptions options, CancellationToken cancellationToken = default) + { + // OneDrive does not expose legal hold controls through the Graph SDK used here. + return Task.FromResult(Result.Succeed()); + } + + protected override Task> HasLegalHoldInternalAsync(LegalHoldOptions options, CancellationToken cancellationToken = default) + { + // OneDrive does not expose legal hold controls through the Graph SDK used here. + return Task.FromResult(Result.Succeed(false)); + } + + private string BuildFullPath(string? relativePath) + { + var normalized = NormalizeRelativePath(relativePath ?? string.Empty); + var root = NormalizeRelativePath(StorageOptions.RootPath); + return string.IsNullOrWhiteSpace(root) ? normalized : string.IsNullOrWhiteSpace(normalized) ? root : $"{root}/{normalized}"; + } + + private static string NormalizeRelativePath(string path) + { + return path.Replace("\\", "/").Trim('/'); + } + + private BlobMetadata ToBlobMetadata(DriveItem item, string fullName) + { + return new BlobMetadata + { + Name = item.Name ?? 
Path.GetFileName(fullName), + FullName = fullName, + Container = StorageOptions.DriveId, + Uri = item.WebUrl != null ? new Uri(item.WebUrl) : null, + CreatedOn = item.CreatedDateTime ?? DateTimeOffset.UtcNow, + LastModified = item.LastModifiedDateTime ?? DateTimeOffset.UtcNow, + Length = (ulong)(item.Size ?? 0), + MimeType = item.File?.MimeType, + Metadata = item.AdditionalData?.ToDictionary(k => k.Key, v => v.Value?.ToString() ?? string.Empty) ?? new Dictionary() + }; + } +} diff --git a/Storages/ManagedCode.Storage.OneDrive/OneDriveStorageProvider.cs b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorageProvider.cs new file mode 100644 index 0000000..3ec3dcc --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorageProvider.cs @@ -0,0 +1,40 @@ +using System; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Extensions; +using ManagedCode.Storage.Core.Providers; +using ManagedCode.Storage.OneDrive.Options; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.OneDrive; + +public class OneDriveStorageProvider(IServiceProvider serviceProvider, OneDriveStorageOptions defaultOptions) : IStorageProvider +{ + public Type StorageOptionsType => typeof(OneDriveStorageOptions); + + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage + where TOptions : class, IStorageOptions + { + if (options is not OneDriveStorageOptions driveOptions) + { + throw new ArgumentException($"Options must be of type {typeof(OneDriveStorageOptions)}", nameof(options)); + } + + var logger = serviceProvider.GetService(typeof(ILogger)) as ILogger; + var storage = new OneDriveStorage(driveOptions, logger); + + return storage as TStorage ?? 
throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); + } + + public IStorageOptions GetDefaultOptions() + { + return new OneDriveStorageOptions + { + DriveId = defaultOptions.DriveId, + RootPath = defaultOptions.RootPath, + GraphClient = defaultOptions.GraphClient, + Client = defaultOptions.Client, + CreateContainerIfNotExists = defaultOptions.CreateContainerIfNotExists + }; + } +} diff --git a/Storages/ManagedCode.Storage.OneDrive/Options/OneDriveStorageOptions.cs b/Storages/ManagedCode.Storage.OneDrive/Options/OneDriveStorageOptions.cs new file mode 100644 index 0000000..dfc9b95 --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/Options/OneDriveStorageOptions.cs @@ -0,0 +1,18 @@ +using ManagedCode.Storage.Core; +using ManagedCode.Storage.OneDrive.Clients; +using Microsoft.Graph; + +namespace ManagedCode.Storage.OneDrive.Options; + +public class OneDriveStorageOptions : IStorageOptions +{ + public Clients.IOneDriveClient? Client { get; set; } + + public GraphServiceClient? GraphClient { get; set; } + + public string DriveId { get; set; } = "me"; + + public string RootPath { get; set; } = "/"; + + public bool CreateContainerIfNotExists { get; set; } = true; +} diff --git a/Storages/ManagedCode.Storage.OneDrive/PLAN.md b/Storages/ManagedCode.Storage.OneDrive/PLAN.md new file mode 100644 index 0000000..058e4f1 --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/PLAN.md @@ -0,0 +1,9 @@ +# OneDrive integration plan + +- [x] Reference the official `Microsoft.Graph` SDK and configure `GraphServiceClient` injection through `OneDriveStorageOptions`. +- [x] Implement `IOneDriveClient` plus `GraphOneDriveClient` to mirror upload, download, metadata, and listing APIs documented for Microsoft Graph drives. +- [x] Create `OneDriveStorage` that adapts `BaseStorage` to OneDrive paths, normalizes root prefixes, and returns `BlobMetadata` compatible with the shared abstractions. 
+- [x] Provide DI-friendly `OneDriveStorageProvider` so ASP.NET and worker hosts can register the provider alongside keyed/default storage bindings. +- [ ] Add sample ASP.NET controller snippets showing how to request delegated or app-only permissions and pass a configured `GraphServiceClient` into `OneDriveStorageOptions`. +- [ ] Extend tests with `IOneDriveClient` mocks that mirror Graph responses for uploads, downloads, listings, deletion, and metadata resolution. +- [ ] Document user-facing setup: Azure App Registration, scopes (`Files.ReadWrite.All`), and the minimal token acquisition steps for CLI and ASP.NET hosts. diff --git a/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj b/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj index bdae9f0..e6bad4e 100644 --- a/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj +++ b/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj @@ -52,6 +52,9 @@ + + + diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs new file mode 100644 index 0000000..c21ebf2 --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs @@ -0,0 +1,376 @@ +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using Google.Apis.Drive.v3.Data; +using ManagedCode.Storage.Core.Models; +using ManagedCode.Storage.Dropbox; +using ManagedCode.Storage.Dropbox.Clients; +using ManagedCode.Storage.Dropbox.Options; +using ManagedCode.Storage.GoogleDrive; +using ManagedCode.Storage.GoogleDrive.Clients; +using ManagedCode.Storage.GoogleDrive.Options; +using ManagedCode.Storage.OneDrive; +using ManagedCode.Storage.OneDrive.Clients; +using ManagedCode.Storage.OneDrive.Options; +using Microsoft.Graph.Models; +using Shouldly; +using Xunit; 
+using File = Google.Apis.Drive.v3.Data.File; + +namespace ManagedCode.Storage.Tests.Storages.CloudDrive; + +public class CloudDriveStorageTests +{ + [Fact] + public async Task OneDrive_FakeClient_RoundTrip() + { + var fakeClient = new FakeOneDriveClient(); + var storage = new OneDriveStorage(new OneDriveStorageOptions + { + Client = fakeClient, + DriveId = "drive", + RootPath = "root" + }); + + var uploadResult = await storage.UploadAsync("hello world", options => options.FileName = "text.txt"); + uploadResult.IsSuccess.ShouldBeTrue(); + + var exists = await storage.ExistsAsync("text.txt"); + exists.Value.ShouldBeTrue(); + + var metadata = await storage.GetBlobMetadataAsync("text.txt"); + metadata.Value.Name.ShouldBe("text.txt"); + + var download = await storage.DownloadAsync("text.txt"); + using var reader = new StreamReader(download.Value.FileStream); + (await reader.ReadToEndAsync()).ShouldBe("hello world"); + + var listed = new List(); + await foreach (var item in storage.GetBlobMetadataListAsync()) + { + listed.Add(item); + } + + listed.ShouldContain(m => m.FullName.EndsWith("text.txt")); + } + + [Fact] + public async Task GoogleDrive_FakeClient_RoundTrip() + { + var fakeClient = new FakeGoogleDriveClient(); + var storage = new GoogleDriveStorage(new GoogleDriveStorageOptions + { + Client = fakeClient, + RootFolderId = "root" + }); + + var uploadResult = await storage.UploadAsync("drive content", options => options.FileName = "data.bin"); + uploadResult.IsSuccess.ShouldBeTrue(); + + var exists = await storage.ExistsAsync("data.bin"); + exists.Value.ShouldBeTrue(); + + var metadata = await storage.GetBlobMetadataAsync("data.bin"); + metadata.Value.FullName.ShouldBe("data.bin"); + + var download = await storage.DownloadAsync("data.bin"); + using var reader = new StreamReader(download.Value.FileStream); + (await reader.ReadToEndAsync()).ShouldBe("drive content"); + + var listed = new List(); + await foreach (var item in storage.GetBlobMetadataListAsync()) + { + 
listed.Add(item); + } + + listed.ShouldContain(m => m.FullName.Contains("data.bin")); + } + + [Fact] + public async Task Dropbox_FakeClient_RoundTrip() + { + var fakeClient = new FakeDropboxClient(); + var storage = new DropboxStorage(new DropboxStorageOptions + { + Client = fakeClient, + RootPath = "/apps/demo" + }); + + var uploadResult = await storage.UploadAsync("dropbox payload", options => options.FileName = "file.json"); + uploadResult.IsSuccess.ShouldBeTrue(); + + var exists = await storage.ExistsAsync("file.json"); + exists.Value.ShouldBeTrue(); + + var metadata = await storage.GetBlobMetadataAsync("file.json"); + metadata.Value.Name.ShouldBe("file.json"); + + var download = await storage.DownloadAsync("file.json"); + using var reader = new StreamReader(download.Value.FileStream); + (await reader.ReadToEndAsync()).ShouldBe("dropbox payload"); + + var listed = new List(); + await foreach (var item in storage.GetBlobMetadataListAsync()) + { + listed.Add(item); + } + + listed.ShouldContain(m => m.FullName.Contains("file.json")); + } + + private class FakeOneDriveClient : IOneDriveClient + { + private readonly InMemoryDrive _drive = new(); + + public Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken) + { + _drive.Root = rootPath; + return Task.CompletedTask; + } + + public Task UploadAsync(string driveId, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) + { + var entry = _drive.Save(path, content, contentType); + return Task.FromResult(entry.ToDriveItem(path)); + } + + public Task DownloadAsync(string driveId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Download(path)); + } + + public Task DeleteAsync(string driveId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Delete(path)); + } + + public Task ExistsAsync(string driveId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Exists(path)); + } + + public Task GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Get(path)?.ToDriveItem(path)); + } + + public async IAsyncEnumerable ListAsync(string driveId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) + { + await foreach (var entry in _drive.List(directory, cancellationToken)) + { + yield return entry.ToDriveItem(entry.Path); + } + } + } + + private class FakeGoogleDriveClient : IGoogleDriveClient + { + private readonly InMemoryDrive _drive = new(); + + public Task EnsureRootAsync(string rootFolderId, bool createIfNotExists, CancellationToken cancellationToken) + { + _drive.Root = rootFolderId; + return Task.CompletedTask; + } + + public Task UploadAsync(string rootFolderId, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) + { + var entry = _drive.Save(path, content, contentType); + return Task.FromResult(entry.ToGoogleFile(path)); + } + + public Task DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Download(path)); + } + + public Task DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Delete(path)); + } + + public Task ExistsAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Exists(path)); + } + + public Task GetMetadataAsync(string rootFolderId, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Get(path)?.ToGoogleFile(path)); + } + + public async IAsyncEnumerable ListAsync(string rootFolderId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) + { + await foreach (var entry in _drive.List(directory, cancellationToken)) + { + yield return entry.ToGoogleFile(entry.Path); + } + } + } + + private class FakeDropboxClient : IDropboxClientWrapper + { + private readonly InMemoryDrive _drive = new(); + + public Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken) + { + _drive.Root = rootPath; + return Task.CompletedTask; + } + + public Task UploadAsync(string rootPath, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) + { + var entry = _drive.Save(path, content, contentType); + return Task.FromResult(entry.ToDropboxFile(path)); + } + + public Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Download(path)); + } + + public Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Delete(path)); + } + + public Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Exists(path)); + } + + public Task GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken) + { + return Task.FromResult(_drive.Get(path)?.ToDropboxFile(path)); + } + + public async IAsyncEnumerable ListAsync(string rootPath, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) + { + await foreach (var entry in _drive.List(directory, cancellationToken)) + { + yield return entry.ToDropboxFile(entry.Path); + } + } + } + + private class InMemoryDrive + { + private readonly Dictionary _entries = new(); + + public string Root { get; set; } = string.Empty; + + public DriveEntry Save(string path, Stream content, string? contentType) + { + using var ms = new MemoryStream(); + content.CopyTo(ms); + var data = ms.ToArray(); + var entry = new DriveEntry + { + Content = data, + ContentType = contentType ?? "application/octet-stream", + Created = System.DateTimeOffset.UtcNow, + Updated = System.DateTimeOffset.UtcNow, + Path = Normalize(path) + }; + + _entries[entry.Path] = entry; + return entry; + } + + public bool Delete(string path) + { + return _entries.Remove(Normalize(path)); + } + + public bool Exists(string path) + { + return _entries.ContainsKey(Normalize(path)); + } + + public DriveEntry? Get(string path) + { + return _entries.TryGetValue(Normalize(path), out var entry) ? 
entry : null; + } + + public Stream Download(string path) + { + var normalized = Normalize(path); + if (!_entries.TryGetValue(normalized, out var entry)) + { + throw new FileNotFoundException(path); + } + + return new MemoryStream(entry.Content, writable: false); + } + + public async IAsyncEnumerable List(string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) + { + var normalized = string.IsNullOrWhiteSpace(directory) ? null : Normalize(directory!); + foreach (var entry in _entries.Values) + { + cancellationToken.ThrowIfCancellationRequested(); + if (normalized == null || entry.Path.StartsWith(normalized)) + { + yield return entry; + } + } + + await Task.CompletedTask; + } + + private string Normalize(string path) + { + return path.Replace("\\", "/").Trim('/'); + } + } + + internal class DriveEntry + { + public required string Path { get; set; } + public required byte[] Content { get; set; } + public required string ContentType { get; set; } + public required System.DateTimeOffset Created { get; set; } + public required System.DateTimeOffset Updated { get; set; } + } +} + +internal static class DriveEntryExtensions +{ + public static DriveItem ToDriveItem(this CloudDriveStorageTests.DriveEntry entry, string fullPath) + { + return new DriveItem + { + Name = System.IO.Path.GetFileName(fullPath), + Size = entry.Content.LongLength, + CreatedDateTime = entry.Created, + LastModifiedDateTime = entry.Updated + }; + } + + public static File ToGoogleFile(this CloudDriveStorageTests.DriveEntry entry, string fullPath) + { + return new File + { + Name = System.IO.Path.GetFileName(fullPath), + Size = entry.Content.LongLength, + CreatedTimeDateTimeOffset = entry.Created, + ModifiedTimeDateTimeOffset = entry.Updated, + MimeType = entry.ContentType + }; + } + + public static DropboxItemMetadata ToDropboxFile(this CloudDriveStorageTests.DriveEntry entry, string fullPath) + { + return new DropboxItemMetadata + { + Name = System.IO.Path.GetFileName(fullPath), 
+ Path = entry.Path, + Size = (ulong)entry.Content.LongLength, + ClientModified = entry.Created.UtcDateTime, + ServerModified = entry.Updated.UtcDateTime + }; + } +} diff --git a/dotnet-install.sh b/dotnet-install.sh new file mode 100755 index 0000000..0e19528 --- /dev/null +++ b/dotnet-install.sh @@ -0,0 +1,1888 @@ +#!/usr/bin/env bash +# Copyright (c) .NET Foundation and contributors. All rights reserved. +# Licensed under the MIT license. See LICENSE file in the project root for full license information. +# + +# Stop script on NZEC +set -e +# Stop script if unbound variable found (use ${var:-} if intentional) +set -u +# By default cmd1 | cmd2 returns exit code of cmd2 regardless of cmd1 success +# This is causing it to fail +set -o pipefail + +# Use in the the functions: eval $invocation +invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"' + +# standard output may be used as a return value in the functions +# we need a way to write text on the screen in the functions so that +# it won't interfere with the return value. +# Exposing stream 3 as a pipe to standard output of the script itself +exec 3>&1 + +# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors. 
+# See if stdout is a terminal +if [ -t 1 ] && command -v tput > /dev/null; then + # see if it supports colors + ncolors=$(tput colors || echo 0) + if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then + bold="$(tput bold || echo)" + normal="$(tput sgr0 || echo)" + black="$(tput setaf 0 || echo)" + red="$(tput setaf 1 || echo)" + green="$(tput setaf 2 || echo)" + yellow="$(tput setaf 3 || echo)" + blue="$(tput setaf 4 || echo)" + magenta="$(tput setaf 5 || echo)" + cyan="$(tput setaf 6 || echo)" + white="$(tput setaf 7 || echo)" + fi +fi + +say_warning() { + printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}" >&3 +} + +say_err() { + printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2 +} + +say() { + # using stream 3 (defined in the beginning) to not interfere with stdout of functions + # which may be used as return value + printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3 +} + +say_verbose() { + if [ "$verbose" = true ]; then + say "$1" + fi +} + +# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets, +# then and only then should the Linux distribution appear in this list. +# Adding a Linux distribution to this list does not imply distribution-specific support. 
+get_legacy_os_name_from_platform() { + eval $invocation + + platform="$1" + case "$platform" in + "centos.7") + echo "centos" + return 0 + ;; + "debian.8") + echo "debian" + return 0 + ;; + "debian.9") + echo "debian.9" + return 0 + ;; + "fedora.23") + echo "fedora.23" + return 0 + ;; + "fedora.24") + echo "fedora.24" + return 0 + ;; + "fedora.27") + echo "fedora.27" + return 0 + ;; + "fedora.28") + echo "fedora.28" + return 0 + ;; + "opensuse.13.2") + echo "opensuse.13.2" + return 0 + ;; + "opensuse.42.1") + echo "opensuse.42.1" + return 0 + ;; + "opensuse.42.3") + echo "opensuse.42.3" + return 0 + ;; + "rhel.7"*) + echo "rhel" + return 0 + ;; + "ubuntu.14.04") + echo "ubuntu" + return 0 + ;; + "ubuntu.16.04") + echo "ubuntu.16.04" + return 0 + ;; + "ubuntu.16.10") + echo "ubuntu.16.10" + return 0 + ;; + "ubuntu.18.04") + echo "ubuntu.18.04" + return 0 + ;; + "alpine.3.4.3") + echo "alpine" + return 0 + ;; + esac + return 1 +} + +get_legacy_os_name() { + eval $invocation + + local uname=$(uname) + if [ "$uname" = "Darwin" ]; then + echo "osx" + return 0 + elif [ -n "$runtime_id" ]; then + echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}") + return 0 + else + if [ -e /etc/os-release ]; then + . /etc/os-release + os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "") + if [ -n "$os" ]; then + echo "$os" + return 0 + fi + fi + fi + + say_verbose "Distribution specific OS name and version could not be detected: UName = $uname" + return 1 +} + +get_linux_platform_name() { + eval $invocation + + if [ -n "$runtime_id" ]; then + echo "${runtime_id%-*}" + return 0 + else + if [ -e /etc/os-release ]; then + . 
/etc/os-release + echo "$ID${VERSION_ID:+.${VERSION_ID}}" + return 0 + elif [ -e /etc/redhat-release ]; then + local redhatRelease=$(&1 || true) | grep -q musl +} + +get_current_os_name() { + eval $invocation + + local uname=$(uname) + if [ "$uname" = "Darwin" ]; then + echo "osx" + return 0 + elif [ "$uname" = "FreeBSD" ]; then + echo "freebsd" + return 0 + elif [ "$uname" = "Linux" ]; then + local linux_platform_name="" + linux_platform_name="$(get_linux_platform_name)" || true + + if [ "$linux_platform_name" = "rhel.6" ]; then + echo $linux_platform_name + return 0 + elif is_musl_based_distro; then + echo "linux-musl" + return 0 + elif [ "$linux_platform_name" = "linux-musl" ]; then + echo "linux-musl" + return 0 + else + echo "linux" + return 0 + fi + fi + + say_err "OS name could not be detected: UName = $uname" + return 1 +} + +machine_has() { + eval $invocation + + command -v "$1" > /dev/null 2>&1 + return $? +} + +check_min_reqs() { + local hasMinimum=false + if machine_has "curl"; then + hasMinimum=true + elif machine_has "wget"; then + hasMinimum=true + fi + + if [ "$hasMinimum" = "false" ]; then + say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed." + return 1 + fi + return 0 +} + +# args: +# input - $1 +to_lowercase() { + #eval $invocation + + echo "$1" | tr '[:upper:]' '[:lower:]' + return 0 +} + +# args: +# input - $1 +remove_trailing_slash() { + #eval $invocation + + local input="${1:-}" + echo "${input%/}" + return 0 +} + +# args: +# input - $1 +remove_beginning_slash() { + #eval $invocation + + local input="${1:-}" + echo "${input#/}" + return 0 +} + +# args: +# root_path - $1 +# child_path - $2 - this parameter can be empty +combine_paths() { + eval $invocation + + # TODO: Consider making it work with any number of paths. For now: + if [ ! -z "${3:-}" ]; then + say_err "combine_paths: Function takes two parameters." 
+ return 1 + fi + + local root_path="$(remove_trailing_slash "$1")" + local child_path="$(remove_beginning_slash "${2:-}")" + say_verbose "combine_paths: root_path=$root_path" + say_verbose "combine_paths: child_path=$child_path" + echo "$root_path/$child_path" + return 0 +} + +get_machine_architecture() { + eval $invocation + + if command -v uname > /dev/null; then + CPUName=$(uname -m) + case $CPUName in + armv1*|armv2*|armv3*|armv4*|armv5*|armv6*) + echo "armv6-or-below" + return 0 + ;; + armv*l) + echo "arm" + return 0 + ;; + aarch64|arm64) + if [ "$(getconf LONG_BIT)" -lt 64 ]; then + # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS) + echo "arm" + return 0 + fi + echo "arm64" + return 0 + ;; + s390x) + echo "s390x" + return 0 + ;; + ppc64le) + echo "ppc64le" + return 0 + ;; + loongarch64) + echo "loongarch64" + return 0 + ;; + riscv64) + echo "riscv64" + return 0 + ;; + powerpc|ppc) + echo "ppc" + return 0 + ;; + esac + fi + + # Always default to 'x64' + echo "x64" + return 0 +} + +# args: +# architecture - $1 +get_normalized_architecture_from_architecture() { + eval $invocation + + local architecture="$(to_lowercase "$1")" + + if [[ $architecture == \ ]]; then + machine_architecture="$(get_machine_architecture)" + if [[ "$machine_architecture" == "armv6-or-below" ]]; then + say_err "Architecture \`$machine_architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues" + return 1 + fi + + echo $machine_architecture + return 0 + fi + + case "$architecture" in + amd64|x64) + echo "x64" + return 0 + ;; + arm) + echo "arm" + return 0 + ;; + arm64) + echo "arm64" + return 0 + ;; + s390x) + echo "s390x" + return 0 + ;; + ppc64le) + echo "ppc64le" + return 0 + ;; + loongarch64) + echo "loongarch64" + return 0 + ;; + esac + + say_err "Architecture \`$architecture\` not supported. 
If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues" + return 1 +} + +# args: +# version - $1 +# channel - $2 +# architecture - $3 +get_normalized_architecture_for_specific_sdk_version() { + eval $invocation + + local is_version_support_arm64="$(is_arm64_supported "$1")" + local is_channel_support_arm64="$(is_arm64_supported "$2")" + local architecture="$3"; + local osname="$(get_current_os_name)" + + if [ "$osname" == "osx" ] && [ "$architecture" == "arm64" ] && { [ "$is_version_support_arm64" = false ] || [ "$is_channel_support_arm64" = false ]; }; then + #check if rosetta is installed + if [ "$(/usr/bin/pgrep oahd >/dev/null 2>&1;echo $?)" -eq 0 ]; then + say_verbose "Changing user architecture from '$architecture' to 'x64' because .NET SDKs prior to version 6.0 do not support arm64." + echo "x64" + return 0; + else + say_err "Architecture \`$architecture\` is not supported for .NET SDK version \`$version\`. Please install Rosetta to allow emulation of the \`$architecture\` .NET SDK on this platform" + return 1 + fi + fi + + echo "$architecture" + return 0 +} + +# args: +# version or channel - $1 +is_arm64_supported() { + # Extract the major version by splitting on the dot + major_version="${1%%.*}" + + # Check if the major version is a valid number and less than 6 + case "$major_version" in + [0-9]*) + if [ "$major_version" -lt 6 ]; then + echo false + return 0 + fi + ;; + esac + + echo true + return 0 +} + +# args: +# user_defined_os - $1 +get_normalized_os() { + eval $invocation + + local osname="$(to_lowercase "$1")" + if [ ! -z "$osname" ]; then + case "$osname" in + osx | freebsd | rhel.6 | linux-musl | linux) + echo "$osname" + return 0 + ;; + macos) + osname='osx' + echo "$osname" + return 0 + ;; + *) + say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, macos, linux, linux-musl, freebsd, rhel.6. 
If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues." + return 1 + ;; + esac + else + osname="$(get_current_os_name)" || return 1 + fi + echo "$osname" + return 0 +} + +# args: +# quality - $1 +get_normalized_quality() { + eval $invocation + + local quality="$(to_lowercase "$1")" + if [ ! -z "$quality" ]; then + case "$quality" in + daily | preview) + echo "$quality" + return 0 + ;; + ga) + #ga quality is available without specifying quality, so normalizing it to empty + return 0 + ;; + *) + say_err "'$quality' is not a supported value for --quality option. Supported values are: daily, preview, ga. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues." + return 1 + ;; + esac + fi + return 0 +} + +# args: +# channel - $1 +get_normalized_channel() { + eval $invocation + + local channel="$(to_lowercase "$1")" + + if [[ $channel == current ]]; then + say_warning 'Value "Current" is deprecated for -Channel option. Use "STS" instead.' + fi + + if [[ $channel == release/* ]]; then + say_warning 'Using branch name with -Channel option is no longer supported with newer releases. Use -Quality option with a channel in X.Y format instead.'; + fi + + if [ ! 
-z "$channel" ]; then + case "$channel" in + lts) + echo "LTS" + return 0 + ;; + sts) + echo "STS" + return 0 + ;; + current) + echo "STS" + return 0 + ;; + *) + echo "$channel" + return 0 + ;; + esac + fi + + return 0 +} + +# args: +# runtime - $1 +get_normalized_product() { + eval $invocation + + local product="" + local runtime="$(to_lowercase "$1")" + if [[ "$runtime" == "dotnet" ]]; then + product="dotnet-runtime" + elif [[ "$runtime" == "aspnetcore" ]]; then + product="aspnetcore-runtime" + elif [ -z "$runtime" ]; then + product="dotnet-sdk" + fi + echo "$product" + return 0 +} + +# The version text returned from the feeds is a 1-line or 2-line string: +# For the SDK and the dotnet runtime (2 lines): +# Line 1: # commit_hash +# Line 2: # 4-part version +# For the aspnetcore runtime (1 line): +# Line 1: # 4-part version + +# args: +# version_text - stdin +get_version_from_latestversion_file_content() { + eval $invocation + + cat | tail -n 1 | sed 's/\r$//' + return 0 +} + +# args: +# install_root - $1 +# relative_path_to_package - $2 +# specific_version - $3 +is_dotnet_package_installed() { + eval $invocation + + local install_root="$1" + local relative_path_to_package="$2" + local specific_version="${3//[$'\t\r\n']}" + + local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")" + say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path" + + if [ -d "$dotnet_package_path" ]; then + return 0 + else + return 1 + fi +} + +# args: +# downloaded file - $1 +# remote_file_size - $2 +validate_remote_local_file_sizes() +{ + eval $invocation + + local downloaded_file="$1" + local remote_file_size="$2" + local file_size='' + + if [[ "$OSTYPE" == "linux-gnu"* ]]; then + file_size="$(stat -c '%s' "$downloaded_file")" + elif [[ "$OSTYPE" == "darwin"* ]]; then + # hardcode in order to avoid conflicts with GNU stat + file_size="$(/usr/bin/stat -f '%z' "$downloaded_file")" + fi + + 
if [ -n "$file_size" ]; then + say "Downloaded file size is $file_size bytes." + + if [ -n "$remote_file_size" ] && [ -n "$file_size" ]; then + if [ "$remote_file_size" -ne "$file_size" ]; then + say "The remote and local file sizes are not equal. The remote file size is $remote_file_size bytes and the local size is $file_size bytes. The local package may be corrupted." + else + say "The remote and local file sizes are equal." + fi + fi + + else + say "Either downloaded or local package size can not be measured. One of them may be corrupted." + fi +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +get_version_from_latestversion_file() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + + local version_file_url=null + if [[ "$runtime" == "dotnet" ]]; then + version_file_url="$azure_feed/Runtime/$channel/latest.version" + elif [[ "$runtime" == "aspnetcore" ]]; then + version_file_url="$azure_feed/aspnetcore/Runtime/$channel/latest.version" + elif [ -z "$runtime" ]; then + version_file_url="$azure_feed/Sdk/$channel/latest.version" + else + say_err "Invalid value for \$runtime" + return 1 + fi + say_verbose "get_version_from_latestversion_file: latest url: $version_file_url" + + download "$version_file_url" || return $? + return 0 +} + +# args: +# json_file - $1 +parse_globaljson_file_for_version() { + eval $invocation + + local json_file="$1" + if [ ! 
-f "$json_file" ]; then + say_err "Unable to find \`$json_file\`" + return 1 + fi + + sdk_section=$(cat $json_file | tr -d "\r" | awk '/"sdk"/,/}/') + if [ -z "$sdk_section" ]; then + say_err "Unable to parse the SDK node in \`$json_file\`" + return 1 + fi + + sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}') + sdk_list=${sdk_list//[\" ]/} + sdk_list=${sdk_list//,/$'\n'} + + local version_info="" + while read -r line; do + IFS=: + while read -r key value; do + if [[ "$key" == "version" ]]; then + version_info=$value + fi + done <<< "$line" + done <<< "$sdk_list" + if [ -z "$version_info" ]; then + say_err "Unable to find the SDK:version node in \`$json_file\`" + return 1 + fi + + unset IFS; + echo "$version_info" + return 0 +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# version - $4 +# json_file - $5 +get_specific_version_from_version() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local version="$(to_lowercase "$4")" + local json_file="$5" + + if [ -z "$json_file" ]; then + if [[ "$version" == "latest" ]]; then + local version_info + version_info="$(get_version_from_latestversion_file "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1 + say_verbose "get_specific_version_from_version: version_info=$version_info" + echo "$version_info" | get_version_from_latestversion_file_content + return 0 + else + echo "$version" + return 0 + fi + else + local version_info + version_info="$(parse_globaljson_file_for_version "$json_file")" || return 1 + echo "$version_info" + return 0 + fi +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# specific_version - $4 +# normalized_os - $5 +construct_download_link() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local specific_version="${4//[$'\t\r\n']}" + local specific_product_version="$(get_specific_product_version 
"$1" "$4")" + local osname="$5" + + local download_link=null + if [[ "$runtime" == "dotnet" ]]; then + download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz" + elif [[ "$runtime" == "aspnetcore" ]]; then + download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz" + elif [ -z "$runtime" ]; then + download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz" + else + return 1 + fi + + echo "$download_link" + return 0 +} + +# args: +# azure_feed - $1 +# specific_version - $2 +# download link - $3 (optional) +get_specific_product_version() { + # If we find a 'productVersion.txt' at the root of any folder, we'll use its contents + # to resolve the version of what's in the folder, superseding the specified version. + # if 'productVersion.txt' is missing but download link is already available, product version will be taken from download link + eval $invocation + + local azure_feed="$1" + local specific_version="${2//[$'\t\r\n']}" + local package_download_link="" + if [ $# -gt 2 ]; then + local package_download_link="$3" + fi + local specific_product_version=null + + # Try to get the version number, using the productVersion.txt file located next to the installer file. + local download_links=($(get_specific_product_version_url "$azure_feed" "$specific_version" true "$package_download_link") + $(get_specific_product_version_url "$azure_feed" "$specific_version" false "$package_download_link")) + + for download_link in "${download_links[@]}" + do + say_verbose "Checking for the existence of $download_link" + + if machine_has "curl" + then + if ! 
specific_product_version=$(curl -s --fail "${download_link}${feed_credential}" 2>&1); then + continue + else + echo "${specific_product_version//[$'\t\r\n']}" + return 0 + fi + + elif machine_has "wget" + then + specific_product_version=$(wget -qO- "${download_link}${feed_credential}" 2>&1) + if [ $? = 0 ]; then + echo "${specific_product_version//[$'\t\r\n']}" + return 0 + fi + fi + done + + # Getting the version number with productVersion.txt has failed. Try parsing the download link for a version number. + say_verbose "Failed to get the version using productVersion.txt file. Download link will be parsed instead." + specific_product_version="$(get_product_specific_version_from_download_link "$package_download_link" "$specific_version")" + echo "${specific_product_version//[$'\t\r\n']}" + return 0 +} + +# args: +# azure_feed - $1 +# specific_version - $2 +# is_flattened - $3 +# download link - $4 (optional) +get_specific_product_version_url() { + eval $invocation + + local azure_feed="$1" + local specific_version="$2" + local is_flattened="$3" + local package_download_link="" + if [ $# -gt 3 ]; then + local package_download_link="$4" + fi + + local pvFileName="productVersion.txt" + if [ "$is_flattened" = true ]; then + if [ -z "$runtime" ]; then + pvFileName="sdk-productVersion.txt" + elif [[ "$runtime" == "dotnet" ]]; then + pvFileName="runtime-productVersion.txt" + else + pvFileName="$runtime-productVersion.txt" + fi + fi + + local download_link=null + + if [ -z "$package_download_link" ]; then + if [[ "$runtime" == "dotnet" ]]; then + download_link="$azure_feed/Runtime/$specific_version/${pvFileName}" + elif [[ "$runtime" == "aspnetcore" ]]; then + download_link="$azure_feed/aspnetcore/Runtime/$specific_version/${pvFileName}" + elif [ -z "$runtime" ]; then + download_link="$azure_feed/Sdk/$specific_version/${pvFileName}" + else + return 1 + fi + else + download_link="${package_download_link%/*}/${pvFileName}" + fi + + say_verbose "Constructed productVersion 
link: $download_link" + echo "$download_link" + return 0 +} + +# args: +# download link - $1 +# specific version - $2 +get_product_specific_version_from_download_link() +{ + eval $invocation + + local download_link="$1" + local specific_version="$2" + local specific_product_version="" + + if [ -z "$download_link" ]; then + echo "$specific_version" + return 0 + fi + + #get filename + filename="${download_link##*/}" + + #product specific version follows the product name + #for filename 'dotnet-sdk-3.1.404-linux-x64.tar.gz': the product version is 3.1.404 + IFS='-' + read -ra filename_elems <<< "$filename" + count=${#filename_elems[@]} + if [[ "$count" -gt 2 ]]; then + specific_product_version="${filename_elems[2]}" + else + specific_product_version=$specific_version + fi + unset IFS; + echo "$specific_product_version" + return 0 +} + +# args: +# azure_feed - $1 +# channel - $2 +# normalized_architecture - $3 +# specific_version - $4 +construct_legacy_download_link() { + eval $invocation + + local azure_feed="$1" + local channel="$2" + local normalized_architecture="$3" + local specific_version="${4//[$'\t\r\n']}" + + local distro_specific_osname + distro_specific_osname="$(get_legacy_os_name)" || return 1 + + local legacy_download_link=null + if [[ "$runtime" == "dotnet" ]]; then + legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz" + elif [ -z "$runtime" ]; then + legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz" + else + return 1 + fi + + echo "$legacy_download_link" + return 0 +} + +get_user_install_path() { + eval $invocation + + if [ ! 
-z "${DOTNET_INSTALL_DIR:-}" ]; then + echo "$DOTNET_INSTALL_DIR" + else + echo "$HOME/.dotnet" + fi + return 0 +} + +# args: +# install_dir - $1 +resolve_installation_path() { + eval $invocation + + local install_dir=$1 + if [ "$install_dir" = "" ]; then + local user_install_path="$(get_user_install_path)" + say_verbose "resolve_installation_path: user_install_path=$user_install_path" + echo "$user_install_path" + return 0 + fi + + echo "$install_dir" + return 0 +} + +# args: +# relative_or_absolute_path - $1 +get_absolute_path() { + eval $invocation + + local relative_or_absolute_path=$1 + echo "$(cd "$(dirname "$1")" && pwd -P)/$(basename "$1")" + return 0 +} + +# args: +# override - $1 (boolean, true or false) +get_cp_options() { + eval $invocation + + local override="$1" + local override_switch="" + + if [ "$override" = false ]; then + override_switch="-n" + + # create temporary files to check if 'cp -u' is supported + tmp_dir="$(mktemp -d)" + tmp_file="$tmp_dir/testfile" + tmp_file2="$tmp_dir/testfile2" + + touch "$tmp_file" + + # use -u instead of -n if it's available + if cp -u "$tmp_file" "$tmp_file2" 2>/dev/null; then + override_switch="-u" + fi + + # clean up + rm -f "$tmp_file" "$tmp_file2" + rm -rf "$tmp_dir" + fi + + echo "$override_switch" +} + +# args: +# input_files - stdin +# root_path - $1 +# out_path - $2 +# override - $3 +copy_files_or_dirs_from_list() { + eval $invocation + + local root_path="$(remove_trailing_slash "$1")" + local out_path="$(remove_trailing_slash "$2")" + local override="$3" + local override_switch="$(get_cp_options "$override")" + + cat | uniq | while read -r file_path; do + local path="$(remove_beginning_slash "${file_path#$root_path}")" + local target="$out_path/$path" + if [ "$override" = true ] || (! 
([ -d "$target" ] || [ -e "$target" ])); then + mkdir -p "$out_path/$(dirname "$path")" + if [ -d "$target" ]; then + rm -rf "$target" + fi + cp -R $override_switch "$root_path/$path" "$target" + fi + done +} + +# args: +# zip_uri - $1 +get_remote_file_size() { + local zip_uri="$1" + + if machine_has "curl"; then + file_size=$(curl -sI "$zip_uri" | grep -i content-length | awk '{ num = $2 + 0; print num }') + elif machine_has "wget"; then + file_size=$(wget --spider --server-response -O /dev/null "$zip_uri" 2>&1 | grep -i 'Content-Length:' | awk '{ num = $2 + 0; print num }') + else + say "Neither curl nor wget is available on this system." + return + fi + + if [ -n "$file_size" ]; then + say "Remote file $zip_uri size is $file_size bytes." + echo "$file_size" + else + say_verbose "Content-Length header was not extracted for $zip_uri." + echo "" + fi +} + +# args: +# zip_path - $1 +# out_path - $2 +# remote_file_size - $3 +extract_dotnet_package() { + eval $invocation + + local zip_path="$1" + local out_path="$2" + local remote_file_size="$3" + + local temp_out_path="$(mktemp -d "$temporary_file_template")" + + local failed=false + tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true + + local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/' + find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false + find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files" + + validate_remote_local_file_sizes "$zip_path" "$remote_file_size" + + rm -rf "$temp_out_path" + if [ -z ${keep_zip+x} ]; then + rm -f "$zip_path" && say_verbose "Temporary archive file $zip_path was removed" + fi + + if [ "$failed" = true ]; then + say_err "Extraction failed" + return 1 + fi + return 0 +} + +# args: +# remote_path - $1 +# disable_feed_credential - $2 +get_http_header() +{ + eval 
$invocation + local remote_path="$1" + local disable_feed_credential="$2" + + local failed=false + local response + if machine_has "curl"; then + get_http_header_curl $remote_path $disable_feed_credential || failed=true + elif machine_has "wget"; then + get_http_header_wget $remote_path $disable_feed_credential || failed=true + else + failed=true + fi + if [ "$failed" = true ]; then + say_verbose "Failed to get HTTP header: '$remote_path'." + return 1 + fi + return 0 +} + +# args: +# remote_path - $1 +# disable_feed_credential - $2 +get_http_header_curl() { + eval $invocation + local remote_path="$1" + local disable_feed_credential="$2" + + remote_path_with_credential="$remote_path" + if [ "$disable_feed_credential" = false ]; then + remote_path_with_credential+="$feed_credential" + fi + + curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 " + curl $curl_options "$remote_path_with_credential" 2>&1 || return 1 + return 0 +} + +# args: +# remote_path - $1 +# disable_feed_credential - $2 +get_http_header_wget() { + eval $invocation + local remote_path="$1" + local disable_feed_credential="$2" + local wget_options="-q -S --spider --tries 5 " + + local wget_options_extra='' + + # Test for options that aren't supported on all wget implementations. + if [[ $(wget -h 2>&1 | grep -E 'waitretry|connect-timeout') ]]; then + wget_options_extra="--waitretry 2 --connect-timeout 15 " + else + say "wget extra options are unavailable for this environment" + fi + + remote_path_with_credential="$remote_path" + if [ "$disable_feed_credential" = false ]; then + remote_path_with_credential+="$feed_credential" + fi + + wget $wget_options $wget_options_extra "$remote_path_with_credential" 2>&1 + + return $? +} + +# args: +# remote_path - $1 +# [out_path] - $2 - stdout if not provided +download() { + eval $invocation + + local remote_path="$1" + local out_path="${2:-}" + + if [[ "$remote_path" != "http"* ]]; then + cp "$remote_path" "$out_path" + return $? 
+ fi + + local failed=false + local attempts=0 + while [ $attempts -lt 3 ]; do + attempts=$((attempts+1)) + failed=false + if machine_has "curl"; then + downloadcurl "$remote_path" "$out_path" || failed=true + elif machine_has "wget"; then + downloadwget "$remote_path" "$out_path" || failed=true + else + say_err "Missing dependency: neither curl nor wget was found." + exit 1 + fi + + if [ "$failed" = false ] || [ $attempts -ge 3 ] || { [ -n "${http_code-}" ] && [ "${http_code}" = "404" ]; }; then + break + fi + + say "Download attempt #$attempts has failed: ${http_code-} ${download_error_msg-}" + say "Attempt #$((attempts+1)) will start in $((attempts*10)) seconds." + sleep $((attempts*10)) + done + + if [ "$failed" = true ]; then + say_verbose "Download failed: $remote_path" + return 1 + fi + return 0 +} + +# Updates global variables $http_code and $download_error_msg +downloadcurl() { + eval $invocation + unset http_code + unset download_error_msg + local remote_path="$1" + local out_path="${2:-}" + # Append feed_credential as late as possible before calling curl to avoid logging feed_credential + # Avoid passing URI with credentials to functions: note, most of them echoing parameters of invocation in verbose output. + local remote_path_with_credential="${remote_path}${feed_credential}" + local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs " + local curl_exit_code=0; + if [ -z "$out_path" ]; then + curl_output=$(curl $curl_options "$remote_path_with_credential" 2>&1) + curl_exit_code=$? + echo "$curl_output" + else + curl_output=$(curl $curl_options -o "$out_path" "$remote_path_with_credential" 2>&1) + curl_exit_code=$? 
+ fi + + # Regression in curl causes curl with --retry to return a 0 exit code even when it fails to download a file - https://github.com/curl/curl/issues/17554 + if [ $curl_exit_code -eq 0 ] && echo "$curl_output" | grep -q "^curl: ([0-9]*) "; then + curl_exit_code=$(echo "$curl_output" | sed 's/curl: (\([0-9]*\)).*/\1/') + fi + + if [ $curl_exit_code -gt 0 ]; then + download_error_msg="Unable to download $remote_path." + # Check for curl timeout codes + if [[ $curl_exit_code == 7 || $curl_exit_code == 28 ]]; then + download_error_msg+=" Failed to reach the server: connection timeout." + else + local disable_feed_credential=false + local response=$(get_http_header_curl $remote_path $disable_feed_credential) + http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 ) + if [[ ! -z $http_code && $http_code != 2* ]]; then + download_error_msg+=" Returned HTTP status code: $http_code." + fi + fi + say_verbose "$download_error_msg" + return 1 + fi + return 0 +} + + +# Updates global variables $http_code and $download_error_msg +downloadwget() { + eval $invocation + unset http_code + unset download_error_msg + local remote_path="$1" + local out_path="${2:-}" + # Append feed_credential as late as possible before calling wget to avoid logging feed_credential + local remote_path_with_credential="${remote_path}${feed_credential}" + local wget_options="--tries 20 " + + local wget_options_extra='' + local wget_result='' + + # Test for options that aren't supported on all wget implementations. + if [[ $(wget -h 2>&1 | grep -E 'waitretry|connect-timeout') ]]; then + wget_options_extra="--waitretry 2 --connect-timeout 15 " + else + say "wget extra options are unavailable for this environment" + fi + + if [ -z "$out_path" ]; then + wget -q $wget_options $wget_options_extra -O - "$remote_path_with_credential" 2>&1 + wget_result=$? + else + wget $wget_options $wget_options_extra -O "$out_path" "$remote_path_with_credential" 2>&1 + wget_result=$? 
+ fi + + if [[ $wget_result != 0 ]]; then + local disable_feed_credential=false + local response=$(get_http_header_wget $remote_path $disable_feed_credential) + http_code=$( echo "$response" | awk '/^ HTTP/{print $2}' | tail -1 ) + download_error_msg="Unable to download $remote_path." + if [[ ! -z $http_code && $http_code != 2* ]]; then + download_error_msg+=" Returned HTTP status code: $http_code." + # wget exit code 4 stands for network-issue + elif [[ $wget_result == 4 ]]; then + download_error_msg+=" Failed to reach the server: connection timeout." + fi + say_verbose "$download_error_msg" + return 1 + fi + + return 0 +} + +get_download_link_from_aka_ms() { + eval $invocation + + #quality is not supported for LTS or STS channel + #STS maps to current + if [[ ! -z "$normalized_quality" && ("$normalized_channel" == "LTS" || "$normalized_channel" == "STS") ]]; then + normalized_quality="" + say_warning "Specifying quality for STS or LTS channel is not supported, the quality will be ignored." + fi + + say_verbose "Retrieving primary payload URL from aka.ms for channel: '$normalized_channel', quality: '$normalized_quality', product: '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'." + + #construct aka.ms link + aka_ms_link="https://aka.ms/dotnet" + if [ "$internal" = true ]; then + aka_ms_link="$aka_ms_link/internal" + fi + aka_ms_link="$aka_ms_link/$normalized_channel" + if [[ ! -z "$normalized_quality" ]]; then + aka_ms_link="$aka_ms_link/$normalized_quality" + fi + aka_ms_link="$aka_ms_link/$normalized_product-$normalized_os-$normalized_architecture.tar.gz" + say_verbose "Constructed aka.ms link: '$aka_ms_link'." 
+ + #get HTTP response + #do not pass credentials as a part of the $aka_ms_link and do not apply credentials in the get_http_header function + #otherwise the redirect link would have credentials as well + #it would result in applying credentials twice to the resulting link and thus breaking it, and in echoing credentials to the output as a part of redirect link + disable_feed_credential=true + response="$(get_http_header $aka_ms_link $disable_feed_credential)" + + say_verbose "Received response: $response" + # Get results of all the redirects. + http_codes=$( echo "$response" | awk '$1 ~ /^HTTP/ {print $2}' ) + # They all need to be 301, otherwise some links are broken (except for the last, which is not a redirect but 200 or 404). + broken_redirects=$( echo "$http_codes" | sed '$d' | grep -v '301' ) + # The response may end without final code 2xx/4xx/5xx somehow, e.g. network restrictions on www.bing.com causes redirecting to bing.com fails with connection refused. + # In this case it should not exclude the last. + last_http_code=$( echo "$http_codes" | tail -n 1 ) + if ! [[ $last_http_code =~ ^(2|4|5)[0-9][0-9]$ ]]; then + broken_redirects=$( echo "$http_codes" | grep -v '301' ) + fi + + # All HTTP codes are 301 (Moved Permanently), the redirect link exists. + if [[ -z "$broken_redirects" ]]; then + aka_ms_download_link=$( echo "$response" | awk '$1 ~ /^Location/{print $2}' | tail -1 | tr -d '\r') + + if [[ -z "$aka_ms_download_link" ]]; then + say_verbose "The aka.ms link '$aka_ms_link' is not valid: failed to get redirect location." + return 1 + fi + + say_verbose "The redirect location retrieved: '$aka_ms_download_link'." + return 0 + else + say_verbose "The aka.ms link '$aka_ms_link' is not valid: received HTTP code: $(echo "$broken_redirects" | paste -sd "," -)." 
+ return 1 + fi +} + +get_feeds_to_use() +{ + feeds=( + "https://builds.dotnet.microsoft.com/dotnet" + "https://ci.dot.net/public" + ) + + if [[ -n "$azure_feed" ]]; then + feeds=("$azure_feed") + fi + + if [[ -n "$uncached_feed" ]]; then + feeds=("$uncached_feed") + fi +} + +# THIS FUNCTION MAY EXIT (if the determined version is already installed). +generate_download_links() { + + download_links=() + specific_versions=() + effective_versions=() + link_types=() + + # If generate_akams_links returns false, no fallback to old links. Just terminate. + # This function may also 'exit' (if the determined version is already installed). + generate_akams_links || return + + # Check other feeds only if we haven't been able to find an aka.ms link. + if [[ "${#download_links[@]}" -lt 1 ]]; then + for feed in ${feeds[@]} + do + # generate_regular_links may also 'exit' (if the determined version is already installed). + generate_regular_links $feed || return + done + fi + + if [[ "${#download_links[@]}" -eq 0 ]]; then + say_err "Failed to resolve the exact version number." + return 1 + fi + + say_verbose "Generated ${#download_links[@]} links." + for link_index in ${!download_links[@]} + do + say_verbose "Link $link_index: ${link_types[$link_index]}, ${effective_versions[$link_index]}, ${download_links[$link_index]}" + done +} + +# THIS FUNCTION MAY EXIT (if the determined version is already installed). +generate_akams_links() { + local valid_aka_ms_link=true; + + normalized_version="$(to_lowercase "$version")" + if [[ "$normalized_version" != "latest" ]] && [ -n "$normalized_quality" ]; then + say_err "Quality and Version options are not allowed to be specified simultaneously. See https://learn.microsoft.com/dotnet/core/tools/dotnet-install-script#options for details." 
+ return 1 + fi + + if [[ -n "$json_file" || "$normalized_version" != "latest" ]]; then + # aka.ms links are not needed when exact version is specified via command or json file + return + fi + + get_download_link_from_aka_ms || valid_aka_ms_link=false + + if [[ "$valid_aka_ms_link" == true ]]; then + say_verbose "Retrieved primary payload URL from aka.ms link: '$aka_ms_download_link'." + say_verbose "Downloading using legacy url will not be attempted." + + download_link=$aka_ms_download_link + + #get version from the path + IFS='/' + read -ra pathElems <<< "$download_link" + count=${#pathElems[@]} + specific_version="${pathElems[count-2]}" + unset IFS; + say_verbose "Version: '$specific_version'." + + #Retrieve effective version + effective_version="$(get_specific_product_version "$azure_feed" "$specific_version" "$download_link")" + + # Add link info to arrays + download_links+=($download_link) + specific_versions+=($specific_version) + effective_versions+=($effective_version) + link_types+=("aka.ms") + + # Check if the SDK version is already installed. + if [[ "$dry_run" != true ]] && is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then + say "$asset_name with version '$effective_version' is already installed." + exit 0 + fi + + return 0 + fi + + # if quality is specified - exit with error - there is no fallback approach + if [ ! -z "$normalized_quality" ]; then + say_err "Failed to locate the latest version in the channel '$normalized_channel' with '$normalized_quality' quality for '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'." + say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support." + return 1 + fi + say_verbose "Falling back to latest.version file approach." 
+} + +# THIS FUNCTION MAY EXIT (if the determined version is already installed) +# args: +# feed - $1 +generate_regular_links() { + local feed="$1" + local valid_legacy_download_link=true + + specific_version=$(get_specific_version_from_version "$feed" "$channel" "$normalized_architecture" "$version" "$json_file") || specific_version='0' + + if [[ "$specific_version" == '0' ]]; then + say_verbose "Failed to resolve the specific version number using feed '$feed'" + return + fi + + effective_version="$(get_specific_product_version "$feed" "$specific_version")" + say_verbose "specific_version=$specific_version" + + download_link="$(construct_download_link "$feed" "$channel" "$normalized_architecture" "$specific_version" "$normalized_os")" + say_verbose "Constructed primary named payload URL: $download_link" + + # Add link info to arrays + download_links+=($download_link) + specific_versions+=($specific_version) + effective_versions+=($effective_version) + link_types+=("primary") + + legacy_download_link="$(construct_legacy_download_link "$feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false + + if [ "$valid_legacy_download_link" = true ]; then + say_verbose "Constructed legacy named payload URL: $legacy_download_link" + + download_links+=($legacy_download_link) + specific_versions+=($specific_version) + effective_versions+=($effective_version) + link_types+=("legacy") + else + legacy_download_link="" + say_verbose "Could not construct a legacy_download_link; omitting..." + fi + + # Check if the SDK version is already installed. + if [[ "$dry_run" != true ]] && is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then + say "$asset_name with version '$effective_version' is already installed." 
+ exit 0 + fi +} + +print_dry_run() { + + say "Payload URLs:" + + for link_index in "${!download_links[@]}" + do + say "URL #$link_index - ${link_types[$link_index]}: ${download_links[$link_index]}" + done + + resolved_version=${specific_versions[0]} + repeatable_command="./$script_name --version "\""$resolved_version"\"" --install-dir "\""$install_root"\"" --architecture "\""$normalized_architecture"\"" --os "\""$normalized_os"\""" + + if [ ! -z "$normalized_quality" ]; then + repeatable_command+=" --quality "\""$normalized_quality"\""" + fi + + if [[ "$runtime" == "dotnet" ]]; then + repeatable_command+=" --runtime "\""dotnet"\""" + elif [[ "$runtime" == "aspnetcore" ]]; then + repeatable_command+=" --runtime "\""aspnetcore"\""" + fi + + repeatable_command+="$non_dynamic_parameters" + + if [ -n "$feed_credential" ]; then + repeatable_command+=" --feed-credential "\"""\""" + fi + + say "Repeatable invocation: $repeatable_command" +} + +calculate_vars() { + eval $invocation + + script_name=$(basename "$0") + normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")" + say_verbose "Normalized architecture: '$normalized_architecture'." + normalized_os="$(get_normalized_os "$user_defined_os")" + say_verbose "Normalized OS: '$normalized_os'." + normalized_quality="$(get_normalized_quality "$quality")" + say_verbose "Normalized quality: '$normalized_quality'." + normalized_channel="$(get_normalized_channel "$channel")" + say_verbose "Normalized channel: '$normalized_channel'." + normalized_product="$(get_normalized_product "$runtime")" + say_verbose "Normalized product: '$normalized_product'." + install_root="$(resolve_installation_path "$install_dir")" + say_verbose "InstallRoot: '$install_root'." 
+ + normalized_architecture="$(get_normalized_architecture_for_specific_sdk_version "$version" "$normalized_channel" "$normalized_architecture")" + + if [[ "$runtime" == "dotnet" ]]; then + asset_relative_path="shared/Microsoft.NETCore.App" + asset_name=".NET Core Runtime" + elif [[ "$runtime" == "aspnetcore" ]]; then + asset_relative_path="shared/Microsoft.AspNetCore.App" + asset_name="ASP.NET Core Runtime" + elif [ -z "$runtime" ]; then + asset_relative_path="sdk" + asset_name=".NET Core SDK" + fi + + get_feeds_to_use +} + +install_dotnet() { + eval $invocation + local download_failed=false + local download_completed=false + local remote_file_size=0 + + mkdir -p "$install_root" + zip_path="${zip_path:-$(mktemp "$temporary_file_template")}" + say_verbose "Archive path: $zip_path" + + for link_index in "${!download_links[@]}" + do + download_link="${download_links[$link_index]}" + specific_version="${specific_versions[$link_index]}" + effective_version="${effective_versions[$link_index]}" + link_type="${link_types[$link_index]}" + + say "Attempting to download using $link_type link $download_link" + + # The download function will set variables $http_code and $download_error_msg in case of failure. + download_failed=false + download "$download_link" "$zip_path" 2>&1 || download_failed=true + + if [ "$download_failed" = true ]; then + case ${http_code-} in + 404) + say "The resource at $link_type link '$download_link' is not available." 
+ ;; + *) + say "Failed to download $link_type link '$download_link': ${http_code-} ${download_error_msg-}" + ;; + esac + rm -f "$zip_path" 2>&1 && say_verbose "Temporary archive file $zip_path was removed" + else + download_completed=true + break + fi + done + + if [[ "$download_completed" == false ]]; then + say_err "Could not find \`$asset_name\` with version = $specific_version" + say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support" + return 1 + fi + + remote_file_size="$(get_remote_file_size "$download_link")" + + say "Extracting archive from $download_link" + extract_dotnet_package "$zip_path" "$install_root" "$remote_file_size" || return 1 + + # Check if the SDK version is installed; if not, fail the installation. + # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed. + if [[ $specific_version == *"rtm"* || $specific_version == *"servicing"* ]]; then + IFS='-' + read -ra verArr <<< "$specific_version" + release_version="${verArr[0]}" + unset IFS; + say_verbose "Checking installation: version = $release_version" + if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$release_version"; then + say "Installed version is $effective_version" + return 0 + fi + fi + + # Check if the standard SDK version is installed. + say_verbose "Checking installation: version = $effective_version" + if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then + say "Installed version is $effective_version" + return 0 + fi + + # Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm. + say_err "Failed to verify the version of installed \`$asset_name\`.\nInstallation source: $download_link.\nInstallation location: $install_root.\nReport the bug at https://github.com/dotnet/install-scripts/issues." 
+ say_err "\`$asset_name\` with version = $effective_version failed to install with an error." + return 1 +} + +args=("$@") + +local_version_file_relative_path="/.version" +bin_folder_relative_path="" +temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX" + +channel="LTS" +version="Latest" +json_file="" +install_dir="" +architecture="" +dry_run=false +no_path=false +azure_feed="" +uncached_feed="" +feed_credential="" +verbose=false +runtime="" +runtime_id="" +quality="" +internal=false +override_non_versioned_files=true +non_dynamic_parameters="" +user_defined_os="" + +while [ $# -ne 0 ] +do + name="$1" + case "$name" in + -c|--channel|-[Cc]hannel) + shift + channel="$1" + ;; + -v|--version|-[Vv]ersion) + shift + version="$1" + ;; + -q|--quality|-[Qq]uality) + shift + quality="$1" + ;; + --internal|-[Ii]nternal) + internal=true + non_dynamic_parameters+=" $name" + ;; + -i|--install-dir|-[Ii]nstall[Dd]ir) + shift + install_dir="$1" + ;; + --arch|--architecture|-[Aa]rch|-[Aa]rchitecture) + shift + architecture="$1" + ;; + --os|-[Oo][SS]) + shift + user_defined_os="$1" + ;; + --shared-runtime|-[Ss]hared[Rr]untime) + say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'." + if [ -z "$runtime" ]; then + runtime="dotnet" + fi + ;; + --runtime|-[Rr]untime) + shift + runtime="$1" + if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then + say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'." + if [[ "$runtime" == "windowsdesktop" ]]; then + say_err "WindowsDesktop archives are manufactured for Windows platforms only." 
+ fi + exit 1 + fi + ;; + --dry-run|-[Dd]ry[Rr]un) + dry_run=true + ;; + --no-path|-[Nn]o[Pp]ath) + no_path=true + non_dynamic_parameters+=" $name" + ;; + --verbose|-[Vv]erbose) + verbose=true + non_dynamic_parameters+=" $name" + ;; + --azure-feed|-[Aa]zure[Ff]eed) + shift + azure_feed="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --uncached-feed|-[Uu]ncached[Ff]eed) + shift + uncached_feed="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + ;; + --feed-credential|-[Ff]eed[Cc]redential) + shift + feed_credential="$1" + #feed_credential should start with "?", for it to be added to the end of the link. + #adding "?" at the beginning of the feed_credential if needed. + [[ -z "$(echo $feed_credential)" ]] || [[ $feed_credential == \?* ]] || feed_credential="?$feed_credential" + ;; + --runtime-id|-[Rr]untime[Ii]d) + shift + runtime_id="$1" + non_dynamic_parameters+=" $name "\""$1"\""" + say_warning "Use of --runtime-id is obsolete and should be limited to the versions below 2.1. To override architecture, use --architecture option instead. To override OS, use --os option instead." + ;; + --jsonfile|-[Jj][Ss]on[Ff]ile) + shift + json_file="$1" + ;; + --skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles) + override_non_versioned_files=false + non_dynamic_parameters+=" $name" + ;; + --keep-zip|-[Kk]eep[Zz]ip) + keep_zip=true + non_dynamic_parameters+=" $name" + ;; + --zip-path|-[Zz]ip[Pp]ath) + shift + zip_path="$1" + ;; + -?|--?|-h|--help|-[Hh]elp) + script_name="dotnet-install.sh" + echo ".NET Tools Installer" + echo "Usage:" + echo " # Install a .NET SDK of a given Quality from a given Channel" + echo " $script_name [-c|--channel ] [-q|--quality ]" + echo " # Install a .NET SDK of a specific public version" + echo " $script_name [-v|--version ]" + echo " $script_name -h|-?|--help" + echo "" + echo "$script_name is a simple command line interface for obtaining dotnet cli." 
+ echo " Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:" + echo " - The SDK needs to be installed without user interaction and without admin rights." + echo " - The SDK installation doesn't need to persist across multiple CI runs." + echo " To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer." + echo "" + echo "Options:" + echo " -c,--channel Download from the channel specified, Defaults to \`$channel\`." + echo " -Channel" + echo " Possible values:" + echo " - STS - the most recent Standard Term Support release" + echo " - LTS - the most recent Long Term Support release" + echo " - 2-part version in a format A.B - represents a specific release" + echo " examples: 2.0; 1.0" + echo " - 3-part version in a format A.B.Cxx - represents a specific SDK release" + echo " examples: 5.0.1xx, 5.0.2xx." + echo " Supported since 5.0 release" + echo " Warning: Value 'Current' is deprecated for the Channel parameter. Use 'STS' instead." + echo " Note: The version parameter overrides the channel parameter when any version other than 'latest' is used." + echo " -v,--version Use specific VERSION, Defaults to \`$version\`." + echo " -Version" + echo " Possible values:" + echo " - latest - the latest build on specific channel" + echo " - 3-part version in a format A.B.C - represents specific version of build" + echo " examples: 2.0.0-preview2-006120; 1.1.0" + echo " -q,--quality Download the latest build of specified quality in the channel." + echo " -Quality" + echo " The possible values are: daily, preview, GA." + echo " Works only in combination with channel. Not applicable for STS and LTS channels and will be ignored if those channels are used." + echo " For SDK use channel in A.B.Cxx format. Using quality for SDK together with channel in A.B format is not supported." + echo " Supported since 5.0 release." 
+ echo " Note: The version parameter overrides the channel parameter when any version other than 'latest' is used, and therefore overrides the quality." + echo " --internal,-Internal Download internal builds. Requires providing credentials via --feed-credential parameter." + echo " --feed-credential Token to access Azure feed. Used as a query string to append to the Azure feed." + echo " -FeedCredential This parameter typically is not specified." + echo " -i,--install-dir Install under specified location (see Install Location below)" + echo " -InstallDir" + echo " --architecture Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`." + echo " --arch,-Architecture,-Arch" + echo " Possible values: x64, arm, arm64, s390x, ppc64le and loongarch64" + echo " --os Specifies operating system to be used when selecting the installer." + echo " Overrides the OS determination approach used by the script. Supported values: osx, linux, linux-musl, freebsd, rhel.6." + echo " In case any other value is provided, the platform will be determined by the script based on machine configuration." + echo " Not supported for legacy links. Use --runtime-id to specify platform for legacy links." + echo " Refer to: https://aka.ms/dotnet-os-lifecycle for more information." + echo " --runtime Installs a shared runtime only, without the SDK." + echo " -Runtime" + echo " Possible values:" + echo " - dotnet - the Microsoft.NETCore.App shared runtime" + echo " - aspnetcore - the Microsoft.AspNetCore.App shared runtime" + echo " --dry-run,-DryRun Do not perform installation. Display download link." + echo " --no-path, -NoPath Do not set PATH for the current process." + echo " --verbose,-Verbose Display diagnostics information." + echo " --azure-feed,-AzureFeed For internal use only." + echo " Allows using a different storage to download SDK archives from." + echo " --uncached-feed,-UncachedFeed For internal use only." 
+ echo " Allows using a different storage to download SDK archives from." + echo " --skip-non-versioned-files Skips non-versioned files if they already exist, such as the dotnet executable." + echo " -SkipNonVersionedFiles" + echo " --jsonfile Determines the SDK version from a user specified global.json file." + echo " Note: global.json must have a value for 'SDK:Version'" + echo " --keep-zip,-KeepZip If set, downloaded file is kept." + echo " --zip-path, -ZipPath If set, downloaded file is stored at the specified path." + echo " -?,--?,-h,--help,-Help Shows this help message" + echo "" + echo "Install Location:" + echo " Location is chosen in following order:" + echo " - --install-dir option" + echo " - Environmental variable DOTNET_INSTALL_DIR" + echo " - $HOME/.dotnet" + exit 0 + ;; + *) + say_err "Unknown argument \`$name\`" + exit 1 + ;; + esac + + shift +done + +say_verbose "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:" +say_verbose "- The SDK needs to be installed without user interaction and without admin rights." +say_verbose "- The SDK installation doesn't need to persist across multiple CI runs." +say_verbose "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n" + +if [ "$internal" = true ] && [ -z "$(echo $feed_credential)" ]; then + message="Provide credentials via --feed-credential parameter." + if [ "$dry_run" = true ]; then + say_warning "$message" + else + say_err "$message" + exit 1 + fi +fi + +check_min_reqs +calculate_vars +# generate_regular_links call below will 'exit' if the determined version is already installed. 
+generate_download_links + +if [[ "$dry_run" = true ]]; then + print_dry_run + exit 0 +fi + +install_dotnet + +bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")" +if [ "$no_path" = false ]; then + say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script." + export PATH="$bin_path":"$PATH" +else + say "Binaries of dotnet can be found in $bin_path" +fi + +say "Note that the script does not resolve dependencies during installation." +say "To check the list of dependencies, go to https://learn.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section." +say "Installation finished successfully." From 760fabd276377aedea9c56bc8ffc6462c18be202 Mon Sep 17 00:00:00 2001 From: ksemenenko Date: Sun, 14 Dec 2025 17:03:22 +0100 Subject: [PATCH 2/6] implement onedrive graph client and docs --- README.md | 60 ++++ .../Clients/GraphOneDriveClient.cs | 178 ++++++++- .../OneDriveStorage.cs | 2 +- .../CloudDrive/CloudDriveStorageTests.cs | 15 + .../CloudDrive/GraphOneDriveClientTests.cs | 339 ++++++++++++++++++ 5 files changed, 585 insertions(+), 9 deletions(-) create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs diff --git a/README.md b/README.md index 3cbc2a3..a614d0e 100644 --- a/README.md +++ b/README.md @@ -52,6 +52,66 @@ Cloud storage vendors expose distinct SDKs, option models, and authentication pa | [ManagedCode.Storage.FileSystem](https://www.nuget.org/packages/ManagedCode.Storage.FileSystem) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.FileSystem.svg)](https://www.nuget.org/packages/ManagedCode.Storage.FileSystem) | Local file system implementation for hybrid or on-premises workloads. 
| | [ManagedCode.Storage.Sftp](https://www.nuget.org/packages/ManagedCode.Storage.Sftp) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.Sftp.svg)](https://www.nuget.org/packages/ManagedCode.Storage.Sftp) | SFTP provider powered by SSH.NET for legacy and air-gapped environments. | +### Configuring OneDrive, Google Drive, and Dropbox + +> iCloud does not expose a public file API suitable for server-side integrations, so only Microsoft, Google, and Dropbox cloud drives are covered here. + +**OneDrive / Microsoft Graph** + +1. Create an app registration in Azure Active Directory (Entra ID) and record the **Application (client) ID**, **Directory (tenant) ID**, and a **client secret**. +2. Add the Microsoft Graph **Files.ReadWrite.All** delegated permission (or **Sites.ReadWrite.All** if you target SharePoint drives) and grant admin consent. +3. In your ASP.NET app, acquire a token via `ClientSecretCredential` or another `TokenCredential` and pass it to `new GraphServiceClient(credential, new[] { "https://graph.microsoft.com/.default" })`. +4. Register OneDrive storage with the Graph client and the drive/root you want to scope to: + + ```csharp + builder.Services.AddOneDriveStorageAsDefault(options => + { + options.GraphClient = graphClient; // from step 3 + options.DriveId = "me"; // or a specific drive ID + options.RootPath = "app-data"; // folder will be created when CreateContainerIfNotExists is true + options.CreateContainerIfNotExists = true; + }); + ``` + +5. If you need to impersonate a specific drive item, swap `DriveId` for the drive GUID returned by Graph. + +**Google Drive** + +1. In [Google Cloud Console](https://console.cloud.google.com/), create a project and enable the **Google Drive API**. +2. Configure an OAuth consent screen and create an **OAuth 2.0 Client ID** (Desktop or Web). Record the client ID and secret. +3. 
Exchange the OAuth code for a refresh token with the `https://www.googleapis.com/auth/drive.file` scope (or broader if necessary). +4. Add the Google Drive provider and feed the credentials to the options: + + ```csharp + builder.Services.AddGoogleDriveStorage(options => + { + options.ClientId = configuration["GoogleDrive:ClientId"]!; + options.ClientSecret = configuration["GoogleDrive:ClientSecret"]!; + options.RefreshToken = configuration["GoogleDrive:RefreshToken"]!; + options.RootFolderId = "root"; // or a shared drive folder id + }); + ``` + +5. Store tokens in user secrets or environment variables; never commit them to source control. + +**Dropbox** + +1. Create an app in the [Dropbox App Console](https://www.dropbox.com/developers/apps) and choose **Scoped access** with the **Full Dropbox** or **App folder** type. +2. Under **Permissions**, enable `files.content.write`, `files.content.read`, and `files.metadata.write` and generate a refresh token via OAuth. +3. Register Dropbox storage with the access credentials and a root path (use `/` for full access apps or `/Apps/` for app folders): + + ```csharp + builder.Services.AddDropboxStorage(options => + { + options.AppKey = configuration["Dropbox:AppKey"]!; + options.AppSecret = configuration["Dropbox:AppSecret"]!; + options.RefreshToken = configuration["Dropbox:RefreshToken"]!; + options.RootPath = "/apps/my-app"; + }); + ``` + +4. Dropbox issues short-lived access tokens from refresh tokens; the SDK handles the exchange automatically once configured. 
+ ### ASP.NET & Clients | Package | Latest | Description | diff --git a/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs b/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs index 81dc769..42228f7 100644 --- a/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs +++ b/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs @@ -22,37 +22,199 @@ public GraphOneDriveClient(GraphServiceClient graphServiceClient) public Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken) { - // Graph-backed provisioning is not executed in this offline wrapper. - return Task.CompletedTask; + return EnsureRootInternalAsync(driveId, rootPath, createIfNotExists, cancellationToken); } public Task UploadAsync(string driveId, string path, Stream content, string? contentType, CancellationToken cancellationToken) { - throw new NotSupportedException("Graph upload requires a configured OneDrive runtime environment."); + return UploadInternalAsync(driveId, path, content, contentType, cancellationToken); } public Task DownloadAsync(string driveId, string path, CancellationToken cancellationToken) { - throw new NotSupportedException("Graph download requires a configured OneDrive runtime environment."); + return DownloadInternalAsync(driveId, path, cancellationToken); } public Task DeleteAsync(string driveId, string path, CancellationToken cancellationToken) { - throw new NotSupportedException("Graph deletion requires a configured OneDrive runtime environment."); + return DeleteInternalAsync(driveId, path, cancellationToken); } public Task ExistsAsync(string driveId, string path, CancellationToken cancellationToken) { - return Task.FromResult(false); + return ExistsInternalAsync(driveId, path, cancellationToken); } public Task GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken) { - return Task.FromResult(null); + return 
GetMetadataInternalAsync(driveId, path, cancellationToken); } public IAsyncEnumerable ListAsync(string driveId, string? directory, CancellationToken cancellationToken) { - return AsyncEnumerable.Empty(); + return ListInternalAsync(driveId, directory, cancellationToken); + } + + private async Task EnsureRootInternalAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken) + { + var normalizedRoot = NormalizePath(rootPath); + if (string.IsNullOrWhiteSpace(normalizedRoot) || normalizedRoot == "/") + { + return; + } + + var root = await GetRootDriveItemAsync(driveId, cancellationToken).ConfigureAwait(false); + var parentId = root.Id ?? throw new InvalidOperationException("Drive root is unavailable for the configured account."); + var segments = normalizedRoot.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + foreach (var segment in segments) + { + cancellationToken.ThrowIfCancellationRequested(); + + var existing = await FindChildAsync(driveId, parentId, segment, cancellationToken).ConfigureAwait(false); + if (existing != null) + { + parentId = existing.Id!; + continue; + } + + if (!createIfNotExists) + { + throw new DirectoryNotFoundException($"Folder '{normalizedRoot}' is missing in the configured drive."); + } + + var childrenBuilder = await GetChildrenBuilderAsync(driveId, parentId, cancellationToken).ConfigureAwait(false); + var created = await childrenBuilder.PostAsync(new DriveItem + { + Name = segment, + Folder = new Folder() + }, cancellationToken: cancellationToken).ConfigureAwait(false); + + parentId = created?.Id ?? throw new InvalidOperationException($"Failed to create OneDrive folder '{segment}'."); + } + } + + private async Task UploadInternalAsync(string driveId, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) + { + var rootBuilder = await GetRootItemBuilderAsync(driveId, cancellationToken).ConfigureAwait(false); + var request = rootBuilder.ItemWithPath(NormalizePath(path)).Content; + var response = await request.PutAsync(content, cancellationToken: cancellationToken).ConfigureAwait(false); + + return response ?? throw new InvalidOperationException("Graph upload returned no item."); + } + + private async Task DownloadInternalAsync(string driveId, string path, CancellationToken cancellationToken) + { + var rootBuilder = await GetRootItemBuilderAsync(driveId, cancellationToken).ConfigureAwait(false); + var request = rootBuilder.ItemWithPath(NormalizePath(path)).Content; + var stream = await request.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + return stream ?? throw new FileNotFoundException($"File '{path}' not found in OneDrive."); + } + + private async Task DeleteInternalAsync(string driveId, string path, CancellationToken cancellationToken) + { + try + { + var rootBuilder = await GetRootItemBuilderAsync(driveId, cancellationToken).ConfigureAwait(false); + await rootBuilder.ItemWithPath(NormalizePath(path)).DeleteAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + return true; + } + catch (ODataError ex) when (ex.ResponseStatusCode == 404) + { + return false; + } + } + + private async Task ExistsInternalAsync(string driveId, string path, CancellationToken cancellationToken) + { + var item = await GetMetadataInternalAsync(driveId, path, cancellationToken).ConfigureAwait(false); + return item != null; + } + + private async Task GetMetadataInternalAsync(string driveId, string path, CancellationToken cancellationToken) + { + try + { + var rootBuilder = await GetRootItemBuilderAsync(driveId, cancellationToken).ConfigureAwait(false); + return await rootBuilder.ItemWithPath(NormalizePath(path)).GetAsync(cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + catch 
(ODataError ex) when (ex.ResponseStatusCode == 404) + { + return null; + } + } + + private async IAsyncEnumerable ListInternalAsync(string driveId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) + { + var normalized = string.IsNullOrWhiteSpace(directory) ? null : NormalizePath(directory!); + var resolvedDriveId = await ResolveDriveIdAsync(driveId, cancellationToken).ConfigureAwait(false); + var parent = normalized == null + ? await _graphServiceClient.Drives[resolvedDriveId].Root.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false) + : await _graphServiceClient.Drives[resolvedDriveId].Root.ItemWithPath(normalized).GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + + if (parent?.Id == null) + { + yield break; + } + + var builder = _graphServiceClient.Drives[resolvedDriveId].Items[parent.Id].Children; + var page = await builder.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + if (page?.Value == null) + { + yield break; + } + + foreach (var item in page.Value) + { + cancellationToken.ThrowIfCancellationRequested(); + if (item != null) + { + yield return item; + } + } + } + + private async Task GetRootItemBuilderAsync(string driveId, CancellationToken cancellationToken) + { + var resolvedDriveId = await ResolveDriveIdAsync(driveId, cancellationToken).ConfigureAwait(false); + return _graphServiceClient.Drives[resolvedDriveId].Root; + } + + private async Task GetChildrenBuilderAsync(string driveId, string parentId, CancellationToken cancellationToken) + { + var resolvedDriveId = await ResolveDriveIdAsync(driveId, cancellationToken).ConfigureAwait(false); + return _graphServiceClient.Drives[resolvedDriveId].Items[parentId].Children; + } + + private async Task GetRootDriveItemAsync(string driveId, CancellationToken cancellationToken) + { + var resolvedDriveId = await ResolveDriveIdAsync(driveId, cancellationToken).ConfigureAwait(false); + var root = await 
_graphServiceClient.Drives[resolvedDriveId].Root.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + return root ?? throw new InvalidOperationException("Drive root is unavailable for the configured account."); + } + + private async Task ResolveDriveIdAsync(string driveId, CancellationToken cancellationToken) + { + if (!driveId.Equals("me", StringComparison.OrdinalIgnoreCase)) + { + return driveId; + } + + var drive = await _graphServiceClient.Me.Drive.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + return drive?.Id ?? throw new InvalidOperationException("Unable to resolve the current user's drive id."); + } + + private async Task FindChildAsync(string driveId, string parentId, string name, CancellationToken cancellationToken) + { + var childrenBuilder = await GetChildrenBuilderAsync(driveId, parentId, cancellationToken).ConfigureAwait(false); + var children = await childrenBuilder.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + + return children?.Value?.FirstOrDefault(c => string.Equals(c?.Name, name, StringComparison.OrdinalIgnoreCase)); + } + + private static string NormalizePath(string path) + { + return path.Replace("\\", "/").Trim('/'); } } diff --git a/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs index c6ead31..4f3d59d 100644 --- a/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs +++ b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs @@ -57,7 +57,7 @@ protected override async Task CreateContainerInternalAsync(CancellationT public override Task RemoveContainerAsync(CancellationToken cancellationToken = default) { // OneDrive containers map to drives or root folders that are typically managed by the account owner. 
- return Task.FromResult(Result.Succeed()); + return Task.FromResult(Result.Fail(new NotSupportedException("Deleting a OneDrive container is not supported."))); } protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default) diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs index c21ebf2..520f864 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs @@ -57,6 +57,21 @@ public async Task OneDrive_FakeClient_RoundTrip() listed.ShouldContain(m => m.FullName.EndsWith("text.txt")); } + [Fact] + public async Task OneDrive_RemoveContainer_NotSupported() + { + var fakeClient = new FakeOneDriveClient(); + var storage = new OneDriveStorage(new OneDriveStorageOptions + { + Client = fakeClient, + DriveId = "drive", + RootPath = "root" + }); + + var result = await storage.RemoveContainerAsync(); + result.IsSuccess.ShouldBeFalse(); + } + [Fact] public async Task GoogleDrive_FakeClient_RoundTrip() { diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs new file mode 100644 index 0000000..429d6b2 --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs @@ -0,0 +1,339 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Azure; +using Azure.Core; +using ManagedCode.Storage.OneDrive.Clients; +using Microsoft.Graph; +using Microsoft.Graph.Models; +using Shouldly; +using Xunit; + +namespace ManagedCode.Storage.Tests.Storages.CloudDrive; + 
+public class GraphOneDriveClientTests +{ + private const string RootKey = "root"; + + [Fact] + public async Task GraphClient_EndToEnd() + { + var handler = new FakeGraphHandler(); + var client = CreateGraphClient(handler); + var storageClient = new GraphOneDriveClient(client); + + await storageClient.EnsureRootAsync("me", "work", true, CancellationToken.None); + + await using (var uploadStream = new MemoryStream(Encoding.UTF8.GetBytes("graph payload"))) + { + var uploaded = await storageClient.UploadAsync("me", "work/doc.txt", uploadStream, "text/plain", CancellationToken.None); + uploaded.Name.ShouldBe("doc.txt"); + } + + (await storageClient.ExistsAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeTrue(); + + var metadata = await storageClient.GetMetadataAsync("me", "work/doc.txt", CancellationToken.None); + metadata.ShouldNotBeNull(); + metadata!.Size.ShouldBe((long)"graph payload".Length); + + await using (var downloaded = await storageClient.DownloadAsync("me", "work/doc.txt", CancellationToken.None)) + using (var reader = new StreamReader(downloaded)) + { + (await reader.ReadToEndAsync()).ShouldBe("graph payload"); + } + + var listed = new List(); + await foreach (var item in storageClient.ListAsync("me", "work", CancellationToken.None)) + { + listed.Add(item); + } + + listed.ShouldContain(i => i.Name == "doc.txt"); + + (await storageClient.DeleteAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeTrue(); + (await storageClient.ExistsAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeFalse(); + } + + private static GraphServiceClient CreateGraphClient(HttpMessageHandler handler) + { + var scopes = new[] { "https://graph.microsoft.com/.default" }; + var credential = new FakeTokenCredential(); + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://graph.microsoft.com/v1.0") + }; + + return new GraphServiceClient(httpClient, credential, scopes, "https://graph.microsoft.com/v1.0"); + } + + private sealed 
class FakeTokenCredential : TokenCredential + { + public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken) + { + return new AccessToken("test-token", DateTimeOffset.UtcNow.AddHours(1)); + } + + public override ValueTask GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken) + { + return new ValueTask(GetToken(requestContext, cancellationToken)); + } + } + + private sealed class FakeGraphHandler : HttpMessageHandler + { + private readonly Dictionary _entries = new(StringComparer.OrdinalIgnoreCase) + { + [RootKey] = GraphEntry.Root() + }; + + protected override Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (IsRootRequest(request.RequestUri)) + { + return Task.FromResult(JsonResponse(_entries[RootKey])); + } + + if (TryHandleChildrenRequest(request, out var childrenResponse)) + { + return Task.FromResult(childrenResponse); + } + + if (TryHandleItemRequest(request, out var itemResponse)) + { + return Task.FromResult(itemResponse); + } + + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)); + } + + private bool TryHandleItemRequest(HttpRequestMessage request, out HttpResponseMessage response) + { + response = new HttpResponseMessage(HttpStatusCode.NotFound); + var segments = request.RequestUri!.AbsolutePath.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + var contentSegment = segments.FirstOrDefault(s => s.EndsWith(":content", StringComparison.OrdinalIgnoreCase)); + + if (contentSegment != null) + { + var path = DecodePath(contentSegment.Replace(":content", string.Empty, StringComparison.OrdinalIgnoreCase)); + return HandleContentRequest(request, path, ref response); + } + + var itemWithPath = segments.FirstOrDefault(s => s.Contains(':')); + if (itemWithPath != null) + { + var path = DecodePath(itemWithPath.Trim(':')); + return 
HandleMetadataRequest(request.Method, path, ref response); + } + + return false; + } + + private bool HandleMetadataRequest(HttpMethod method, string path, ref HttpResponseMessage response) + { + var entry = _entries.Values.FirstOrDefault(v => string.Equals(v.Path, path, StringComparison.OrdinalIgnoreCase)); + if (method == HttpMethod.Delete) + { + if (entry == null) + { + response = new HttpResponseMessage(HttpStatusCode.NotFound); + return true; + } + + _entries.Remove(entry.Id); + response = new HttpResponseMessage(HttpStatusCode.NoContent); + return true; + } + + if (entry == null) + { + response = new HttpResponseMessage(HttpStatusCode.NotFound); + return true; + } + + response = JsonResponse(entry); + return true; + } + + private bool HandleContentRequest(HttpRequestMessage request, string path, ref HttpResponseMessage response) + { + if (request.Method == HttpMethod.Put) + { + var parentPath = Path.GetDirectoryName(path)?.Replace("\\", "/").Trim('/') ?? string.Empty; + EnsureFolder(parentPath); + + var buffer = request.Content!.ReadAsStream(); + using var memory = new MemoryStream(); + buffer.CopyTo(memory); + var entry = GraphEntry.File(Path.GetFileName(path), parentPath, memory.ToArray()); + _entries[entry.Id] = entry; + response = JsonResponse(entry); + return true; + } + + var existing = _entries.Values.FirstOrDefault(v => string.Equals(v.Path, path, StringComparison.OrdinalIgnoreCase)); + if (existing == null) + { + response = new HttpResponseMessage(HttpStatusCode.NotFound); + return true; + } + + response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(existing.Content) + }; + + return true; + } + + private bool TryHandleChildrenRequest(HttpRequestMessage request, out HttpResponseMessage response) + { + response = new HttpResponseMessage(HttpStatusCode.NotFound); + var path = request.RequestUri!.AbsolutePath; + if (!path.EndsWith("/children", StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + var 
idSegment = path.Contains("items", StringComparison.OrdinalIgnoreCase) + ? path.Split('/', StringSplitOptions.RemoveEmptyEntries).SkipWhile(s => !s.Equals("items", StringComparison.OrdinalIgnoreCase)).Skip(1).FirstOrDefault() + : RootKey; + + if (request.Method == HttpMethod.Post) + { + var body = request.Content!.ReadAsStringAsync().GetAwaiter().GetResult(); + var item = JsonSerializer.Deserialize(body, new JsonSerializerOptions + { + PropertyNameCaseInsensitive = true + }); + + var created = GraphEntry.Folder(item!.Name!, parentPath: _entries[idSegment ?? RootKey].Path); + _entries[created.Id] = created; + response = JsonResponse(created, HttpStatusCode.Created); + return true; + } + + var children = _entries.Values.Where(e => string.Equals(e.ParentPath, _entries[idSegment ?? RootKey].Path, StringComparison.OrdinalIgnoreCase)).ToList(); + response = JsonResponse(new DriveItemCollectionResponse + { + Value = children.Select(GraphEntry.ToDriveItem).ToList() + }); + + return true; + } + + private static bool IsRootRequest(Uri? requestUri) + { + return requestUri != null && requestUri.AbsolutePath.TrimEnd('/').EndsWith("me/drive/root", StringComparison.OrdinalIgnoreCase); + } + + private void EnsureFolder(string path) + { + var normalized = path.Trim('/'); + if (string.IsNullOrWhiteSpace(normalized)) + { + return; + } + + if (_entries.Values.Any(e => string.Equals(e.Path, normalized, StringComparison.OrdinalIgnoreCase))) + { + return; + } + + var parentPath = Path.GetDirectoryName(normalized)?.Replace("\\", "/").Trim('/') ?? 
string.Empty; + EnsureFolder(parentPath); + + var folder = GraphEntry.Folder(Path.GetFileName(normalized), parentPath); + _entries[folder.Id] = folder; + } + + private static string DecodePath(string segment) + { + return Uri.UnescapeDataString(segment.Replace("root:", string.Empty, StringComparison.OrdinalIgnoreCase)).Trim('/'); + } + + private static HttpResponseMessage JsonResponse(object content, HttpStatusCode status = HttpStatusCode.OK) + { + var response = new HttpResponseMessage(status) + { + Content = new StringContent(JsonSerializer.Serialize(content)) + }; + + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); + return response; + } + } + + private sealed class GraphEntry + { + public required string Id { get; init; } + public required string Name { get; init; } + public required string Path { get; init; } + public required string ParentPath { get; init; } + public byte[] Content { get; init; } = Array.Empty(); + public bool IsFolder { get; init; } + + public static GraphEntry Root() + { + return new GraphEntry + { + Id = RootKey, + Name = "root", + Path = string.Empty, + ParentPath = string.Empty, + IsFolder = true + }; + } + + public static GraphEntry Folder(string name, string parentPath) + { + var normalizedParent = parentPath.Trim('/'); + var path = string.IsNullOrWhiteSpace(normalizedParent) ? name : $"{normalizedParent}/{name}"; + return new GraphEntry + { + Id = Guid.NewGuid().ToString("N"), + Name = name, + Path = path, + ParentPath = normalizedParent, + IsFolder = true + }; + } + + public static GraphEntry File(string name, string parentPath, byte[] content) + { + var normalizedParent = parentPath.Trim('/'); + var path = string.IsNullOrWhiteSpace(normalizedParent) ? 
name : $"{normalizedParent}/{name}"; + return new GraphEntry + { + Id = Guid.NewGuid().ToString("N"), + Name = name, + Path = path, + ParentPath = normalizedParent, + Content = content, + IsFolder = false + }; + } + + public static DriveItem ToDriveItem(GraphEntry entry) + { + return new DriveItem + { + Id = entry.Id, + Name = entry.Name, + Size = entry.Content.LongLength, + CreatedDateTime = DateTimeOffset.UtcNow, + LastModifiedDateTime = DateTimeOffset.UtcNow, + File = entry.IsFolder ? null : new Microsoft.Graph.Models.FileObject(), + Folder = entry.IsFolder ? new Folder() : null + }; + } + } +} From 3ec4bfa0f367965566a416528e9cfb2c8daa3486 Mon Sep 17 00:00:00 2001 From: ksemenenko Date: Sun, 14 Dec 2025 23:17:22 +0100 Subject: [PATCH 3/6] more work and fixes --- AGENTS.md | 198 +++++- ManagedCode.Storage.slnx | 3 +- README.md | 230 ++++++- .../Clients/CloudKitClient.cs | 598 ++++++++++++++++++ .../Clients/CloudKitRecord.cs | 14 + .../Clients/ICloudKitClient.cs | 22 + .../CloudKitStorage.cs | 279 ++++++++ .../CloudKitStorageProvider.cs | 49 ++ .../Extensions/ServiceCollectionExtensions.cs | 130 ++++ .../ICloudKitStorage.cs | 10 + .../ManagedCode.Storage.CloudKit.csproj | 20 + .../Options/CloudKitDatabase.cs | 9 + .../Options/CloudKitEnvironment.cs | 8 + .../Options/CloudKitStorageOptions.cs | 66 ++ .../Clients/DropboxClientWrapper.cs | 47 +- .../DropboxStorage.cs | 19 +- .../Extensions/ServiceCollectionExtensions.cs | 93 +++ .../ManagedCode.Storage.Dropbox.csproj | 4 +- .../Clients/GoogleDriveClient.cs | 84 ++- .../Extensions/ServiceCollectionExtensions.cs | 93 +++ .../GoogleDriveStorage.cs | 13 +- .../ManagedCode.Storage.GoogleDrive.csproj | 4 +- .../Extensions/ServiceCollectionExtensions.cs | 98 +++ .../ManagedCode.Storage.OneDrive.csproj | 4 +- .../OneDriveStorage.cs | 30 +- .../ManagedCode.Storage.Tests.csproj | 21 +- .../CloudDriveDependencyInjectionTests.cs | 214 +++++++ .../CloudDriveStorageProviderTests.cs | 184 ++++++ 
.../CloudDrive/CloudDriveStorageTests.cs | 226 ++++++- .../DropboxClientWrapperHttpTests.cs | 399 ++++++++++++ .../CloudDrive/GoogleDriveClientHttpTests.cs | 339 ++++++++++ .../CloudDrive/GraphOneDriveClientTests.cs | 129 +++- .../CloudKit/CloudKitClientHttpTests.cs | 72 +++ .../CloudKitDependencyInjectionTests.cs | 71 +++ .../CloudKit/CloudKitStorageProviderTests.cs | 73 +++ .../Storages/CloudKit/CloudKitStorageTests.cs | 58 ++ .../CloudKit/FakeCloudKitHttpHandler.cs | 271 ++++++++ 37 files changed, 4018 insertions(+), 164 deletions(-) create mode 100644 Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitClient.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitRecord.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/Clients/ICloudKitClient.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/CloudKitStorage.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/CloudKitStorageProvider.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/Extensions/ServiceCollectionExtensions.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/ICloudKitStorage.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/ManagedCode.Storage.CloudKit.csproj create mode 100644 Storages/ManagedCode.Storage.CloudKit/Options/CloudKitDatabase.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/Options/CloudKitEnvironment.cs create mode 100644 Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs create mode 100644 Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs create mode 100644 Storages/ManagedCode.Storage.GoogleDrive/Extensions/ServiceCollectionExtensions.cs create mode 100644 Storages/ManagedCode.Storage.OneDrive/Extensions/ServiceCollectionExtensions.cs create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageProviderTests.cs 
create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitDependencyInjectionTests.cs create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageProviderTests.cs create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageTests.cs create mode 100644 Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs diff --git a/AGENTS.md b/AGENTS.md index 6c5152f..78fe2be 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -1,37 +1,179 @@ -## Conversations -any resulting updates to agents.md should go under the section "## Rules to follow" -When you see a convincing argument from me on how to solve or do something. add a summary for this in agents.md. so you learn what I want over time. -If I say any of the following point, you do this: add the context to agents.md, and associate this with a specific type of task. -if I say "never do x" in some way. -if I say "always do x" in some way. -if I say "the process is x" in some way. -If I tell you to remember something, you do the same, update +ManagedCode.Storage — .NET 10 +Follows [MCAF](https://mcaf.managed-code.com/) -## Rules to follow -- Ensure storage-related changes keep broad automated coverage around 85-90% using generic, provider-agnostic tests across file systems, storages, and integrations. -- Deliver ASP.NET integrations that expose upload/download controllers, SignalR streaming, and matching HTTP and SignalR clients built on the storage layer for files, streams, and chunked transfers. -- Provide base ASP.NET controllers with minimal routing so consumers can inherit and customize routes, authorization, and behaviors without rigid defaults. 
-- Favor controller extension patterns and optionally expose interfaces to guide consumers on recommended actions so they can implement custom endpoints easily. -- For comprehensive storage platform upgrades, follow the nine-step flow: solidify SignalR streaming hub/client with logging and tests, harden controller upload paths (standard/stream/chunked) with large-file coverage, add keyed DI registrations and cross-provider sync fixtures, extend VFS with keyed support and >1 GB trials, create streamed large-file/CRC helpers, run end-to-end suites (controllers, SignalR, VFS, cross-provider), verify Blazor upload extensions, expand docs with VFS + provider identity guidance + keyed samples, and finish by running the full preview-enabled test suite addressing warnings. -- Normalise MIME lookups through `MimeHelper`; avoid ad-hoc MIME resolution helpers so all content-type logic flows through its APIs. +--- -# Repository Guidelines +## Conversations (Self-Learning) -## Project Structure & Module Organization -ManagedCode.Storage.slnx orchestrates the .NET 9 projects. Core abstractions live in `ManagedCode.Storage.Core/`. Providers sit under `Storages/ManagedCode.Storage.*` with one project per cloud target (Azure, AWS, GCP, FileSystem, Sftp). Integration surfaces, including the ASP.NET server and client SDKs, live in `Integraions/`. Test doubles stay in `ManagedCode.Storage.TestFakes/`, while the suites in `Tests/ManagedCode.Storage.Tests/` are grouped into ASP.NET flows, provider runs, and shared helpers. Keep shared assets such as `logo.png` at the repository root. +Learn the user's habits, preferences, and working style. Extract rules from conversations, save to "## Rules to follow", and generate code according to the user's personal rules. -## Build, Test, and Development Commands -Run `dotnet restore ManagedCode.Storage.slnx` before compiling. Use `dotnet build ManagedCode.Storage.slnx` to compile every target and surface analyzer warnings. 
Execute all tests with `dotnet test Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj --configuration Release`. For coverage, run `dotnet test /p:CollectCoverage=true /p:CoverletOutput=coverage /p:CoverletOutputFormat=opencover`. Use `dotnet format ManagedCode.Storage.slnx` before opening a pull request. +**Update requirement (core mechanism):** -## Coding Style & Naming Conventions -Follow standard C# conventions: 4-space indentation, PascalCase types, camelCase locals, and suffix async APIs with `Async`. Nullability is enabled repository-wide, so annotate optional members and avoid the suppression operator unless justified. Match method names to existing patterns such as `DownloadFile_WhenFileExists_ReturnsSuccess`. Remove unused usings and let analyzers guide layout. +Before doing ANY task, evaluate the latest user message. +If you detect a new rule, correction, preference, or change → update `AGENTS.md` first. +Only after updating the file you may produce the task output. +If no new rule is detected → do not update the file. -## Testing Guidelines -Tests use xUnit and Shouldly; choose `[Fact]` for atomic cases and `[Theory]` for data-driven permutations. Place provider suites under `Tests/ManagedCode.Storage.Tests/Storages/` and reuse `.../Common/` helpers to spin up Testcontainers (Azurite, LocalStack, FakeGcsServer). Add fakes or harnesses mirroring `ManagedCode.Storage.TestFakes/` when introducing new providers. Always run `dotnet test` locally and exercise critical upload/download paths. +**When to extract rules:** -## Commit & Pull Request Guidelines -Write commit subjects in the imperative mood (`add ftp retry policy`) and keep them provider-scoped. Group related edits in one commit and avoid WIP spam. Pull requests should summarize impact, list touched projects, reference issues, and note new configuration or secrets. Include the `dotnet` commands you ran and add logs when CI needs context. 
+- prohibition words (never, don't, stop, avoid) or similar → add NEVER rule +- requirement words (always, must, make sure, should) or similar → add ALWAYS rule +- memory words (remember, keep in mind, note that) or similar → add rule +- process words (the process is, the workflow is, we do it like) or similar → add to workflow +- convincing argument about approach → capture as a rule (include why) +- future words (from now on, going forward) or similar → add permanent rule -## Security & Configuration Tips -Never commit API keys, connection strings, or `.trx` artifacts; rely on environment variables or user secrets. Document minimal permissions and default container expectations for new providers. Ensure server integrations stay authenticated and refresh configuration examples in `README.md` when behavior changes. +**Preferences → add to Preferences section:** + +- positive (I like, I prefer, this is better) or similar → Likes +- negative (I don't like, I hate, this is bad) or similar → Dislikes +- comparison (prefer X over Y, use X instead of Y) or similar → preference rule + +**Corrections → update or add rule:** + +- error indication (this is wrong, incorrect, broken) or similar → fix and add rule +- repetition frustration (don't do this again, you ignored, you missed) or similar → emphatic rule +- manual fixes by user → extract what changed and why + +**Strong signal (add IMMEDIATELY):** + +- swearing, frustration, anger, sarcasm → critical rule +- ALL CAPS, excessive punctuation (!!!, ???) → high priority +- same mistake twice → permanent emphatic rule +- user undoes your changes → understand why, prevent + +**Ignore (do NOT add):** + +- temporary scope (only for now, just this time, for this task) or similar +- one-off exceptions +- context-specific instructions for current task only + +**Rule format:** + +- One instruction per bullet +- Tie to category (Testing, Code, Docs, etc.) 
+- Capture WHY, not just what +- Remove obsolete rules when superseded + +--- + +## Rules to follow (Mandatory, no exceptions) + +### Commands + +- restore: `dotnet restore ManagedCode.Storage.slnx` +- build: `dotnet build ManagedCode.Storage.slnx` +- test: `dotnet test Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj --configuration Release` +- coverage: `dotnet test Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj --configuration Release /p:CollectCoverage=true /p:CoverletOutput=coverage /p:CoverletOutputFormat=opencover` +- format: `dotnet format ManagedCode.Storage.slnx` + +### Task Delivery (ALL TASKS) + +- Read assignment, inspect code and docs before planning +- Write multi-step plan before implementation +- Implement code and tests together +- Run tests in layers: new → related suite → broader regressions +- After all tests pass: run format, then build +- Summarize changes and test results before marking complete +- Always run required builds and tests yourself; do not ask the user to execute them (explicit user directive) + +### Documentation (ALL TASKS) + +- Docs live in `docs/` and `README.md` +- Update docs when behaviour changes +- Update configuration examples when required +- When adding new projects/providers, ensure `README.md` clearly documents installation, DI wiring, and basic usage examples +- Where feasible, prefer provider options that can build vendor SDK clients from credentials (to reduce consumer boilerplate) while still allowing client injection for advanced scenarios +- For providers that rely on vendor SDK clients (Graph/Drive/Dropbox/etc.), document how to obtain credentials/keys/tokens and include a minimal code snippet that builds the required SDK client instance + +### Testing (ALL TASKS) + +- Every behaviour change needs sufficient automated tests to cover its cases; one is the minimum, not the target +- Each public API endpoint has at least one test; complex endpoints have tests for different 
inputs and errors +- Integration tests must exercise real flows end-to-end, not just call endpoints in isolation +- Prefer integration/API tests over unit tests +- Keep mocks to an absolute minimum; prefer real flows using fakes/containers where possible +- Never write tests that only validate mocked interactions; every test must assert concrete, observable behaviour (state, output, errors, side-effects) +- When faking external APIs, match the official API docs (endpoints, status codes, error payloads, and field naming) and prefer `HttpMessageHandler`-based fakes over ad-hoc mocks +- No mocks for internal systems (DB, queues, caches) — use containers/fakes as appropriate +- Mocks only for external third-party systems +- Never delete or weaken a test to make it pass +- Each test verifies a real flow or scenario; tests without meaningful assertions are forbidden +- Check coverage to find gaps, not to chase numbers +- Tests use xUnit + Shouldly; choose `[Fact]` for atomic cases and `[Theory]` for data-driven permutations +- Place provider suites under `Tests/ManagedCode.Storage.Tests/Storages/` and reuse `Tests/ManagedCode.Storage.Tests/Common/` helpers to spin up Testcontainers (Azurite, LocalStack, FakeGcsServer) +- Add fakes or harnesses in `ManagedCode.Storage.TestFakes/` when introducing new providers + +### Storage Platform (ALL TASKS) + +- Ensure storage-related changes keep broad automated coverage around 85-90% using generic, provider-agnostic tests across file systems, storages, and integrations +- Deliver ASP.NET integrations that expose upload/download controllers, SignalR streaming, and matching HTTP and SignalR clients built on the storage layer for files, streams, and chunked transfers +- Provide base ASP.NET controllers with minimal routing so consumers can inherit and customize routes, authorization, and behaviors without rigid defaults +- Favor controller extension patterns and optionally expose interfaces to guide consumers on recommended actions so 
they can implement custom endpoints easily +- For comprehensive storage platform upgrades, follow the nine-step flow: solidify SignalR streaming hub/client with logging and tests, harden controller upload paths (standard/stream/chunked) with large-file coverage, add keyed DI registrations and cross-provider sync fixtures, extend VFS with keyed support and >1 GB trials, create streamed large-file/CRC helpers, run end-to-end suites (controllers, SignalR, VFS, cross-provider), verify Blazor upload extensions, expand docs with VFS + provider identity guidance + keyed samples, and finish by running the full preview-enabled test suite addressing warnings +- Normalise MIME lookups through `MimeHelper`; avoid ad-hoc MIME resolution helpers so all content-type logic flows through its APIs + +### Project Structure + +- `ManagedCode.Storage.slnx` orchestrates the .NET 10 projects +- Core abstractions: `ManagedCode.Storage.Core/` +- Virtual file system: `ManagedCode.Storage.VirtualFileSystem/` +- Providers: `Storages/ManagedCode.Storage.*` (one project per cloud target: Azure, AWS, GCP, FileSystem, Sftp) +- Integrations (ASP.NET server + client SDKs): `Integraions/` +- Test doubles: `ManagedCode.Storage.TestFakes/` +- Test suites: `Tests/ManagedCode.Storage.Tests/` (ASP.NET flows, provider runs, shared helpers) +- Keep shared assets such as `logo.png` at repository root + +### Autonomy + +- Start work immediately — no permission seeking +- Questions only for architecture blockers not covered by ADR +- Report only when task is complete + +### Code Style + +- Style rules: `.editorconfig` +- Follow standard C# conventions: 4-space indentation, PascalCase types, camelCase locals +- Nullability is enabled: annotate optional members; avoid `!` unless justified +- Suffix async APIs with `Async`; keep test names aligned with existing patterns (e.g., `DownloadFile_WhenFileExists_ReturnsSuccess`) +- Remove unused usings and let analyzers guide layout +- No magic literals — extract to 
constants, enums, or config when it improves clarity + +### Git & PRs + +- Write commit subjects in the imperative mood (`add ftp retry policy`) and keep them provider-scoped +- Group related edits in one commit and avoid WIP spam +- PRs should summarize impact, list touched projects, reference issues, and note new configuration or secrets +- Include the `dotnet` commands you ran and add logs when CI needs context + +### Critical (NEVER violate) + +- Never commit secrets, keys, access tokens, or connection strings +- Never commit `.trx` artifacts +- Never mock internal systems in integration tests (DB, queues, caches) — use containers/fakes instead +- Never skip tests to make PR green +- Never force push to `main` +- Never approve or merge (human decision) + +### Boundaries + +**Always:** + +- Read `AGENTS.md` and relevant docs before editing code +- Run tests before commit + +**Ask first:** + +- Changing public API contracts +- Adding new dependencies +- Modifying database schema +- Deleting code files + +--- + +## Preferences + +### Likes + +### Dislikes diff --git a/ManagedCode.Storage.slnx b/ManagedCode.Storage.slnx index ea3ca1a..ded85fc 100644 --- a/ManagedCode.Storage.slnx +++ b/ManagedCode.Storage.slnx @@ -19,6 +19,7 @@ + @@ -26,4 +27,4 @@ - \ No newline at end of file + diff --git a/README.md b/README.md index a614d0e..aef8691 100644 --- a/README.md +++ b/README.md @@ -13,7 +13,7 @@ Cross-provider blob storage toolkit for .NET and ASP.NET streaming scenarios. -ManagedCode.Storage wraps vendor SDKs behind a single `IStorage` abstraction so uploads, downloads, metadata, streaming, and retention behave the same regardless of provider. Swap between Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, SFTP, a local file system, or the in-memory Virtual File System without rewriting application code. Pair it with our ASP.NET controllers and SignalR client to deliver chunked uploads, ranged downloads, and progress notifications end to end. 
+ManagedCode.Storage wraps vendor SDKs behind a single `IStorage` abstraction so uploads, downloads, metadata, streaming, and retention behave the same regardless of provider. Swap between Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, OneDrive, Google Drive, Dropbox, CloudKit (iCloud app data), SFTP, a local file system, or the in-memory Virtual File System without rewriting application code. Pair it with our ASP.NET controllers and SignalR client to deliver chunked uploads, ranged downloads, and progress notifications end to end. ## Motivation @@ -22,7 +22,7 @@ Cloud storage vendors expose distinct SDKs, option models, and authentication pa ## Features - Unified `IStorage` abstraction covering upload, download, streaming, metadata, deletion, container management, and legal hold operations backed by `Result` responses. -- Provider coverage across Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, SFTP, local file system, and the in-memory Virtual File System (VFS). +- Provider coverage across Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, OneDrive (Microsoft Graph), Google Drive, Dropbox, CloudKit (iCloud app data), SFTP, local file system, and the in-memory Virtual File System (VFS). - Keyed dependency-injection registrations plus default provider helpers to fan out files per tenant, region, or workload without manual service plumbing. - ASP.NET storage controllers, chunk orchestration services, and a SignalR hub/client pair that deliver resumable uploads, ranged downloads, CRC32 validation, and real-time progress. - `ManagedCode.Storage.Client` brings streaming uploads/downloads, CRC32 helpers, and MIME discovery via `MimeHelper` to any .NET app. 
@@ -51,44 +51,138 @@ Cloud storage vendors expose distinct SDKs, option models, and authentication pa | [ManagedCode.Storage.Gcp](https://www.nuget.org/packages/ManagedCode.Storage.Gcp) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.Gcp.svg)](https://www.nuget.org/packages/ManagedCode.Storage.Gcp) | Google Cloud Storage integration built on official SDKs. | | [ManagedCode.Storage.FileSystem](https://www.nuget.org/packages/ManagedCode.Storage.FileSystem) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.FileSystem.svg)](https://www.nuget.org/packages/ManagedCode.Storage.FileSystem) | Local file system implementation for hybrid or on-premises workloads. | | [ManagedCode.Storage.Sftp](https://www.nuget.org/packages/ManagedCode.Storage.Sftp) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.Sftp.svg)](https://www.nuget.org/packages/ManagedCode.Storage.Sftp) | SFTP provider powered by SSH.NET for legacy and air-gapped environments. | +| [ManagedCode.Storage.OneDrive](https://www.nuget.org/packages/ManagedCode.Storage.OneDrive) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.OneDrive.svg)](https://www.nuget.org/packages/ManagedCode.Storage.OneDrive) | OneDrive provider built on Microsoft Graph. | +| [ManagedCode.Storage.GoogleDrive](https://www.nuget.org/packages/ManagedCode.Storage.GoogleDrive) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.GoogleDrive.svg)](https://www.nuget.org/packages/ManagedCode.Storage.GoogleDrive) | Google Drive provider built on the Google Drive API. | +| [ManagedCode.Storage.Dropbox](https://www.nuget.org/packages/ManagedCode.Storage.Dropbox) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.Dropbox.svg)](https://www.nuget.org/packages/ManagedCode.Storage.Dropbox) | Dropbox provider built on the Dropbox API. 
| +| [ManagedCode.Storage.CloudKit](https://www.nuget.org/packages/ManagedCode.Storage.CloudKit) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.CloudKit.svg)](https://www.nuget.org/packages/ManagedCode.Storage.CloudKit) | CloudKit (iCloud app data) provider built on CloudKit Web Services. | -### Configuring OneDrive, Google Drive, and Dropbox +### Configuring OneDrive, Google Drive, Dropbox, and CloudKit -> iCloud does not expose a public file API suitable for server-side integrations, so only Microsoft, Google, and Dropbox cloud drives are covered here. +> iCloud Drive does not expose a public server-side file API. `ManagedCode.Storage.CloudKit` targets CloudKit Web Services (iCloud app data), not iCloud Drive. + +These providers follow the same DI patterns as the other backends: use `Add*StorageAsDefault(...)` to bind `IStorage`, or `Add*Storage(...)` to inject the provider interface (`IOneDriveStorage`, `IGoogleDriveStorage`, `IDropboxStorage`, `ICloudKitStorage`). + +Most cloud-drive providers expect you to create the official SDK client (Graph/Drive/Dropbox) with your preferred auth flow and pass it into the storage options. ManagedCode.Storage does not run OAuth flows automatically. + +Keyed registrations are available as well (useful for multi-tenant apps): + +```csharp +using Dropbox.Api; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Dropbox.Extensions; + +var accessToken = configuration["Dropbox:AccessToken"]; // obtained via OAuth (see Dropbox section below) +var dropboxClient = new DropboxClient(accessToken); + +builder.Services.AddDropboxStorageAsDefault("tenant-a", options => +{ + options.DropboxClient = dropboxClient; + options.RootPath = "/apps/my-app"; +}); + +var tenantStorage = app.Services.GetRequiredKeyedService("tenant-a"); +``` **OneDrive / Microsoft Graph** +0. 
Install the provider package and import DI extensions: + + ```bash + dotnet add package ManagedCode.Storage.OneDrive + dotnet add package Azure.Identity + ``` + + ```csharp + using ManagedCode.Storage.OneDrive.Extensions; + ``` + + Docs: [Register an app](https://learn.microsoft.com/en-us/entra/identity-platform/quickstart-register-app), [Microsoft Graph auth](https://learn.microsoft.com/en-us/graph/auth/). + 1. Create an app registration in Azure Active Directory (Entra ID) and record the **Application (client) ID**, **Directory (tenant) ID**, and a **client secret**. -2. Add the Microsoft Graph **Files.ReadWrite.All** delegated permission (or **Sites.ReadWrite.All** if you target SharePoint drives) and grant admin consent. -3. In your ASP.NET app, acquire a token via `ClientSecretCredential` or another `TokenCredential` and pass it to `new GraphServiceClient(credential, new[] { "https://graph.microsoft.com/.default" })`. +2. In **API permissions**, add Microsoft Graph permissions: + - For server-to-server apps: **Application** → `Files.ReadWrite.All` (or `Sites.ReadWrite.All` for SharePoint drives), then **Grant admin consent**. + - For user flows: **Delegated** permissions are also possible, but you must supply a Graph client that authenticates as the user. +3. Create the Graph client (example uses client credentials): + + ```csharp + using Azure.Identity; + using Microsoft.Graph; + + var tenantId = configuration["OneDrive:TenantId"]!; + var clientId = configuration["OneDrive:ClientId"]!; + var clientSecret = configuration["OneDrive:ClientSecret"]!; + + var credential = new ClientSecretCredential(tenantId, clientId, clientSecret); + var graphClient = new GraphServiceClient(credential, new[] { "https://graph.microsoft.com/.default" }); + ``` + 4. 
Register OneDrive storage with the Graph client and the drive/root you want to scope to: ```csharp builder.Services.AddOneDriveStorageAsDefault(options => { - options.GraphClient = graphClient; // from step 3 + options.GraphClient = graphClient; options.DriveId = "me"; // or a specific drive ID options.RootPath = "app-data"; // folder will be created when CreateContainerIfNotExists is true options.CreateContainerIfNotExists = true; }); ``` -5. If you need to impersonate a specific drive item, swap `DriveId` for the drive GUID returned by Graph. +5. If you need a concrete drive id, fetch it via Graph (example): + + ```csharp + var drive = await graphClient.Me.Drive.GetAsync(); + var driveId = drive?.Id; + ``` **Google Drive** +0. Install the provider package and import DI extensions: + + ```bash + dotnet add package ManagedCode.Storage.GoogleDrive + ``` + + ```csharp + using ManagedCode.Storage.GoogleDrive.Extensions; + ``` + + Docs: [Drive API overview](https://developers.google.com/drive/api/guides/about-sdk), [OAuth 2.0](https://developers.google.com/identity/protocols/oauth2). + 1. In [Google Cloud Console](https://console.cloud.google.com/), create a project and enable the **Google Drive API**. -2. Configure an OAuth consent screen and create an **OAuth 2.0 Client ID** (Desktop or Web). Record the client ID and secret. -3. Exchange the OAuth code for a refresh token with the `https://www.googleapis.com/auth/drive.file` scope (or broader if necessary). -4. Add the Google Drive provider and feed the credentials to the options: +2. Create credentials: + - **Service account** (recommended for server apps): create a service account and download a JSON key. + - **OAuth client** (interactive user auth): configure OAuth consent screen and create an OAuth client id/secret. +3. Create a `DriveService`. 
+ + **Service account example:** ```csharp - builder.Services.AddGoogleDriveStorage(options => + using Google.Apis.Auth.OAuth2; + using Google.Apis.Drive.v3; + using Google.Apis.Services; + + var credential = GoogleCredential + .FromFile("service-account.json") + .CreateScoped(DriveService.Scope.Drive); + + var driveService = new DriveService(new BaseClientService.Initializer { - options.ClientId = configuration["GoogleDrive:ClientId"]!; - options.ClientSecret = configuration["GoogleDrive:ClientSecret"]!; - options.RefreshToken = configuration["GoogleDrive:RefreshToken"]!; - options.RootFolderId = "root"; // or a shared drive folder id + HttpClientInitializer = credential, + ApplicationName = "MyApp" + }); + ``` + + If you use a service account, share the target folder/drive with the service account email (or use a Shared Drive) so it can see your files. + +4. Register the Google Drive provider with the configured `DriveService` and a root folder id: + + ```csharp + builder.Services.AddGoogleDriveStorageAsDefault(options => + { + options.DriveService = driveService; + options.RootFolderId = "root"; // or a specific folder id you control + options.CreateContainerIfNotExists = true; }); ``` @@ -96,21 +190,99 @@ Cloud storage vendors expose distinct SDKs, option models, and authentication pa **Dropbox** +0. Install the provider package and import DI extensions: + + ```bash + dotnet add package ManagedCode.Storage.Dropbox + ``` + + ```csharp + using ManagedCode.Storage.Dropbox.Extensions; + ``` + + Docs: [Dropbox App Console](https://www.dropbox.com/developers/apps), [OAuth guide](https://www.dropbox.com/developers/documentation/http/documentation#oauth2). + 1. Create an app in the [Dropbox App Console](https://www.dropbox.com/developers/apps) and choose **Scoped access** with the **Full Dropbox** or **App folder** type. -2. Under **Permissions**, enable `files.content.write`, `files.content.read`, and `files.metadata.write` and generate a refresh token via OAuth. -3. 
Register Dropbox storage with the access credentials and a root path (use `/` for full access apps or `/Apps/` for app folders): +2. Record the **App key** and **App secret** (Settings tab). +3. Under **Permissions**, enable `files.content.write`, `files.content.read`, `files.metadata.read`, and `files.metadata.write` (plus any additional scopes you need) and save changes. +4. Obtain an access token: + - For quick local testing, you can generate a token in the app console. + - For production, use OAuth code flow (example): + + ```csharp + using Dropbox.Api; + + var appKey = configuration["Dropbox:AppKey"]!; + var appSecret = configuration["Dropbox:AppSecret"]!; + var redirectUri = configuration["Dropbox:RedirectUri"]!; // must be registered in Dropbox app console + + // 1) Redirect user to: + // var authorizeUri = DropboxOAuth2Helper.GetAuthorizeUri(OAuthResponseType.Code, appKey, redirectUri, tokenAccessType: TokenAccessType.Offline); + // + // 2) Receive the 'code' on your redirect endpoint, then exchange it: + var auth = await DropboxOAuth2Helper.ProcessCodeFlowAsync(code, appKey, appSecret, redirectUri); + var accessToken = auth.AccessToken; + var refreshToken = auth.RefreshToken; // store securely if you requested offline access + ``` + +5. Create the Dropbox client and register Dropbox storage with a root path (use `/` for full access apps or `/Apps/` for app folders): ```csharp - builder.Services.AddDropboxStorage(options => + using Dropbox.Api; + builder.Services.AddDropboxStorageAsDefault(options => { - options.AppKey = configuration["Dropbox:AppKey"]!; - options.AppSecret = configuration["Dropbox:AppSecret"]!; - options.RefreshToken = configuration["Dropbox:RefreshToken"]!; + var accessToken = configuration["Dropbox:AccessToken"]!; + options.DropboxClient = new DropboxClient(accessToken); options.RootPath = "/apps/my-app"; + options.CreateContainerIfNotExists = true; + }); + ``` + +6. 
Store tokens in user secrets or environment variables; never commit them to source control. + +**CloudKit (iCloud app data)** + +0. Install the provider package and import DI extensions: + + ```bash + dotnet add package ManagedCode.Storage.CloudKit + ``` + + ```csharp + using ManagedCode.Storage.CloudKit.Extensions; + using ManagedCode.Storage.CloudKit.Options; + ``` + + Docs: [CloudKit Web Services Reference](https://developer.apple.com/library/archive/documentation/DataManagement/Conceptual/CloudKitWebServicesReference/index.html). + +1. In Apple Developer / CloudKit Dashboard, configure the container you want to use and note its container id (example: `iCloud.com.company.app`). +2. Ensure the file record type exists (default `MCStorageFile`). +3. Add these fields to the record type: + - `path` (String) — must be queryable/indexed for prefix listing. + - `contentType` (String) — optional but recommended. + - `file` (Asset) — stores the binary content. +4. Configure authentication: + - **API token** (`ckAPIToken`): create an API token for your container in CloudKit Dashboard and store it as a secret. + - **Server-to-server key** (public DB only): create a CloudKit key in Apple Developer (download the `.p8` private key, keep the key id). +5. Register CloudKit storage: + + ```csharp + builder.Services.AddCloudKitStorageAsDefault(options => + { + options.ContainerId = "iCloud.com.company.app"; + options.Environment = CloudKitEnvironment.Production; + options.Database = CloudKitDatabase.Public; + options.RootPath = "app-data"; + + // Choose ONE auth mode: + options.ApiToken = configuration["CloudKit:ApiToken"]; + // OR: + // options.ServerToServerKeyId = configuration["CloudKit:KeyId"]; + // options.ServerToServerPrivateKeyPem = configuration["CloudKit:PrivateKeyPem"]; // paste PEM (.p8) contents }); ``` -4. Dropbox issues short-lived access tokens from refresh tokens; the SDK handles the exchange automatically once configured. +6. 
CloudKit Web Services impose size limits; keep files reasonably small and validate against your current CloudKit quotas. ### ASP.NET & Clients @@ -145,6 +317,10 @@ flowchart LR AzureDL["Azure Data Lake"] Aws["Amazon S3"] Gcp["Google Cloud Storage"] + OneDrive["OneDrive (Graph)"] + GoogleDrive["Google Drive"] + Dropbox["Dropbox"] + CloudKit["CloudKit (iCloud app data)"] Fs["File System"] Sftp["SFTP"] end @@ -156,11 +332,15 @@ flowchart LR Factories --> AzureDL Factories --> Aws Factories --> Gcp + Factories --> OneDrive + Factories --> GoogleDrive + Factories --> Dropbox + Factories --> CloudKit Factories --> Fs Factories --> Sftp ``` -Keyed provider registrations let you resolve multiple named instances from dependency injection while reusing the same abstraction across Azure, AWS, GCP, SFTP, and local file system storage. +Keyed provider registrations let you resolve multiple named instances from dependency injection while reusing the same abstraction across Azure, AWS, Google Cloud Storage, Google Drive, OneDrive, Dropbox, CloudKit, SFTP, and local file system storage. ### ASP.NET Streaming Controllers @@ -348,6 +528,8 @@ Need resumable uploads or live progress UI? Call AddStorageSignalR() UploadAsync(string recordName, string internalPath, Stream content, string contentType, CancellationToken cancellationToken) + { + var uploadUrl = await GetAssetUploadUrlAsync(recordName, cancellationToken); + var assetValue = await UploadAssetAsync(uploadUrl, content, contentType, cancellationToken); + var record = await UpsertRecordAsync(recordName, internalPath, contentType, assetValue, cancellationToken); + return record; + } + + public async Task DownloadAsync(string recordName, CancellationToken cancellationToken) + { + var record = await GetRecordAsync(recordName, cancellationToken) + ?? 
throw new FileNotFoundException($"CloudKit record '{recordName}' not found."); + + if (record.DownloadUrl == null) + { + throw new InvalidOperationException("CloudKit record does not include an asset download URL."); + } + + return await DownloadFromUrlAsync(record.DownloadUrl, cancellationToken); + } + + public async Task DeleteAsync(string recordName, CancellationToken cancellationToken) + { + var payload = new Dictionary + { + ["operations"] = new object[] + { + new Dictionary + { + ["operationType"] = "forceDelete", + ["record"] = new Dictionary + { + ["recordName"] = recordName + } + } + } + }; + + var document = await SendCloudKitAsync("records/modify", payload, cancellationToken); + if (TryGetRecordErrorCode(document.RootElement, out var errorCode)) + { + if (errorCode == "NOT_FOUND") + { + return false; + } + + throw new InvalidOperationException($"CloudKit delete failed with error code '{errorCode}'."); + } + + return true; + } + + public async Task ExistsAsync(string recordName, CancellationToken cancellationToken) + { + return await GetRecordAsync(recordName, cancellationToken) != null; + } + + public async Task GetRecordAsync(string recordName, CancellationToken cancellationToken) + { + var payload = new Dictionary + { + ["records"] = new object[] + { + new Dictionary + { + ["recordName"] = recordName + } + }, + ["desiredKeys"] = new[] { _options.PathFieldName, _options.ContentTypeFieldName, _options.AssetFieldName } + }; + + var document = await SendCloudKitAsync("records/lookup", payload, cancellationToken); + if (TryGetRecordErrorCode(document.RootElement, out var errorCode)) + { + if (errorCode == "NOT_FOUND") + { + return null; + } + + throw new InvalidOperationException($"CloudKit lookup failed with error code '{errorCode}'."); + } + + if (!document.RootElement.TryGetProperty("records", out var records) || records.ValueKind != JsonValueKind.Array) + { + return null; + } + + var first = records.EnumerateArray().FirstOrDefault(); + return 
first.ValueKind == JsonValueKind.Object ? ParseRecord(first) : null; + } + + public async IAsyncEnumerable QueryByPathPrefixAsync(string pathPrefix, [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken) + { + var marker = (string?)null; + do + { + cancellationToken.ThrowIfCancellationRequested(); + + var payload = new Dictionary + { + ["query"] = new Dictionary + { + ["recordType"] = _options.RecordType, + ["filterBy"] = new object[] + { + new Dictionary + { + ["fieldName"] = _options.PathFieldName, + ["comparator"] = "BEGINS_WITH", + ["fieldValue"] = new Dictionary + { + ["value"] = pathPrefix + } + } + } + }, + ["desiredKeys"] = new[] { _options.PathFieldName, _options.ContentTypeFieldName, _options.AssetFieldName }, + ["resultsLimit"] = 200 + }; + + if (!string.IsNullOrWhiteSpace(marker)) + { + payload["continuationMarker"] = marker; + } + + var document = await SendCloudKitAsync("records/query", payload, cancellationToken); + if (document.RootElement.TryGetProperty("records", out var records) && records.ValueKind == JsonValueKind.Array) + { + foreach (var record in records.EnumerateArray()) + { + cancellationToken.ThrowIfCancellationRequested(); + if (record.ValueKind == JsonValueKind.Object) + { + yield return ParseRecord(record); + } + } + } + + marker = document.RootElement.TryGetProperty("continuationMarker", out var markerElement) && markerElement.ValueKind == JsonValueKind.String + ? 
markerElement.GetString() + : null; + } while (!string.IsNullOrWhiteSpace(marker)); + } + + private async Task GetAssetUploadUrlAsync(string recordName, CancellationToken cancellationToken) + { + var payload = new Dictionary + { + ["tokens"] = new object[] + { + new Dictionary + { + ["recordType"] = _options.RecordType, + ["recordName"] = recordName, + ["fieldName"] = _options.AssetFieldName + } + } + }; + + var document = await SendCloudKitAsync("assets/upload", payload, cancellationToken); + if (!document.RootElement.TryGetProperty("tokens", out var tokens) || tokens.ValueKind != JsonValueKind.Array) + { + throw new InvalidOperationException("CloudKit assets/upload response does not include tokens."); + } + + var token = tokens.EnumerateArray().FirstOrDefault(); + if (token.ValueKind != JsonValueKind.Object || !token.TryGetProperty("url", out var urlElement) || urlElement.ValueKind != JsonValueKind.String) + { + throw new InvalidOperationException("CloudKit assets/upload response does not include an upload URL."); + } + + return new Uri(urlElement.GetString()!, UriKind.Absolute); + } + + private async Task UploadAssetAsync(Uri uploadUrl, Stream content, string contentType, CancellationToken cancellationToken) + { + if (content.CanSeek) + { + content.Position = 0; + } + + using var request = new HttpRequestMessage(HttpMethod.Post, uploadUrl) + { + Content = new StreamContent(content) + }; + + request.Content.Headers.ContentType = new MediaTypeHeaderValue(string.IsNullOrWhiteSpace(contentType) ? 
"application/octet-stream" : contentType); + + using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseContentRead, cancellationToken); + var json = await response.Content.ReadAsByteArrayAsync(cancellationToken); + if (!response.IsSuccessStatusCode) + { + throw new HttpRequestException($"CloudKit asset upload failed with status {(int)response.StatusCode}.", null, response.StatusCode); + } + + using var document = JsonDocument.Parse(json); + if (!document.RootElement.TryGetProperty("singleFile", out var singleFile) || singleFile.ValueKind != JsonValueKind.Object) + { + throw new InvalidOperationException("CloudKit asset upload response does not include 'singleFile'."); + } + + return singleFile.Clone(); + } + + private async Task UpsertRecordAsync(string recordName, string internalPath, string contentType, JsonElement assetValue, CancellationToken cancellationToken) + { + var fields = new Dictionary + { + [_options.PathFieldName] = new Dictionary { ["value"] = internalPath }, + [_options.ContentTypeFieldName] = new Dictionary { ["value"] = contentType }, + [_options.AssetFieldName] = new Dictionary { ["value"] = assetValue } + }; + + var payload = new Dictionary + { + ["operations"] = new object[] + { + new Dictionary + { + ["operationType"] = "forceUpdate", + ["record"] = new Dictionary + { + ["recordType"] = _options.RecordType, + ["recordName"] = recordName, + ["fields"] = fields + } + } + } + }; + + var document = await SendCloudKitAsync("records/modify", payload, cancellationToken); + if (TryGetRecordErrorCode(document.RootElement, out var errorCode)) + { + throw new InvalidOperationException($"CloudKit modify failed with error code '{errorCode}'."); + } + + if (!document.RootElement.TryGetProperty("records", out var records) || records.ValueKind != JsonValueKind.Array) + { + throw new InvalidOperationException("CloudKit modify response does not include records."); + } + + var first = records.EnumerateArray().FirstOrDefault(); + 
if (first.ValueKind != JsonValueKind.Object) + { + throw new InvalidOperationException("CloudKit modify response did not return a record."); + } + + return ParseRecord(first); + } + + private async Task SendCloudKitAsync(string operation, object payload, CancellationToken cancellationToken) + { + var subpath = BuildSubpath(operation); + var uri = BuildUri(subpath); + var body = JsonSerializer.SerializeToUtf8Bytes(payload, _jsonOptions); + + using var request = new HttpRequestMessage(HttpMethod.Post, uri); + request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + request.Content = new ByteArrayContent(body); + request.Content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + + ApplyAuthentication(request, subpath, body); + + using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseContentRead, cancellationToken); + var json = await response.Content.ReadAsByteArrayAsync(cancellationToken); + + if (!response.IsSuccessStatusCode) + { + throw new HttpRequestException($"CloudKit request failed: {(int)response.StatusCode} {response.ReasonPhrase}", null, response.StatusCode); + } + + return JsonDocument.Parse(json); + } + + private async Task DownloadFromUrlAsync(Uri downloadUrl, CancellationToken cancellationToken) + { + var response = await _httpClient.GetAsync(downloadUrl, HttpCompletionOption.ResponseHeadersRead, cancellationToken); + if (!response.IsSuccessStatusCode) + { + response.Dispose(); + throw new HttpRequestException($"CloudKit asset download failed: {(int)response.StatusCode} {response.ReasonPhrase}", null, response.StatusCode); + } + + var stream = await response.Content.ReadAsStreamAsync(cancellationToken); + return new ResponseDisposingStream(stream, response); + } + + private string BuildSubpath(string operation) + { + var environment = _options.Environment == CloudKitEnvironment.Production ? 
"production" : "development"; + var database = _options.Database.ToString().ToLowerInvariant(); + return $"/database/1/{_options.ContainerId}/{environment}/{database}/{operation}"; + } + + private Uri BuildUri(string subpath) + { + var builder = new UriBuilder(BaseUri) + { + Path = subpath + }; + + var queryParts = new List(); + if (!string.IsNullOrWhiteSpace(_options.ApiToken)) + { + queryParts.Add("ckAPIToken=" + Uri.EscapeDataString(_options.ApiToken)); + } + + if (!string.IsNullOrWhiteSpace(_options.WebAuthToken)) + { + queryParts.Add("ckWebAuthToken=" + Uri.EscapeDataString(_options.WebAuthToken)); + } + + builder.Query = string.Join('&', queryParts); + return builder.Uri; + } + + private void ApplyAuthentication(HttpRequestMessage request, string subpath, byte[] body) + { + if (!string.IsNullOrWhiteSpace(_options.ApiToken)) + { + return; + } + + if (string.IsNullOrWhiteSpace(_options.ServerToServerKeyId) || _signingKey == null) + { + return; + } + + var date = DateTimeOffset.UtcNow.ToString("yyyy-MM-dd'T'HH:mm:ss'Z'"); + var bodyHash = Convert.ToBase64String(SHA256.HashData(body)); + var signatureData = $"{date}:{bodyHash}:{subpath}"; + var signatureBytes = _signingKey.SignData(Encoding.UTF8.GetBytes(signatureData), HashAlgorithmName.SHA256); + var signature = Convert.ToBase64String(signatureBytes); + + request.Headers.TryAddWithoutValidation("X-Apple-CloudKit-Request-KeyID", _options.ServerToServerKeyId); + request.Headers.TryAddWithoutValidation("X-Apple-CloudKit-Request-ISO8601Date", date); + request.Headers.TryAddWithoutValidation("X-Apple-CloudKit-Request-SignatureV1", signature); + } + + private CloudKitRecord ParseRecord(JsonElement record) + { + var recordName = record.TryGetProperty("recordName", out var nameElement) && nameElement.ValueKind == JsonValueKind.String + ? nameElement.GetString() ?? 
string.Empty + : string.Empty; + + var recordType = record.TryGetProperty("recordType", out var typeElement) && typeElement.ValueKind == JsonValueKind.String + ? typeElement.GetString() ?? _options.RecordType + : _options.RecordType; + + var createdOn = ReadTimestamp(record, "created"); + var lastModified = ReadTimestamp(record, "modified"); + + var fields = record.TryGetProperty("fields", out var fieldsElement) && fieldsElement.ValueKind == JsonValueKind.Object + ? fieldsElement + : default; + + var path = ReadStringField(fields, _options.PathFieldName) ?? string.Empty; + var contentType = ReadStringField(fields, _options.ContentTypeFieldName); + + var (downloadUrl, size) = ReadAsset(fields, _options.AssetFieldName); + + return new CloudKitRecord( + RecordName: recordName, + RecordType: recordType, + Path: path, + CreatedOn: createdOn, + LastModified: lastModified, + ContentType: contentType, + Size: size, + DownloadUrl: downloadUrl); + } + + private static DateTimeOffset ReadTimestamp(JsonElement record, string propertyName) + { + if (!record.TryGetProperty(propertyName, out var element) || element.ValueKind != JsonValueKind.Object) + { + return DateTimeOffset.UtcNow; + } + + if (!element.TryGetProperty("timestamp", out var timestamp) || (timestamp.ValueKind != JsonValueKind.Number && timestamp.ValueKind != JsonValueKind.String)) + { + return DateTimeOffset.UtcNow; + } + + if (timestamp.ValueKind == JsonValueKind.Number && timestamp.TryGetInt64(out var ms)) + { + return DateTimeOffset.FromUnixTimeMilliseconds(ms); + } + + if (timestamp.ValueKind == JsonValueKind.String && long.TryParse(timestamp.GetString(), out ms)) + { + return DateTimeOffset.FromUnixTimeMilliseconds(ms); + } + + return DateTimeOffset.UtcNow; + } + + private static string? 
ReadStringField(JsonElement fields, string fieldName) + { + if (fields.ValueKind != JsonValueKind.Object) + { + return null; + } + + if (!fields.TryGetProperty(fieldName, out var field) || field.ValueKind != JsonValueKind.Object) + { + return null; + } + + if (!field.TryGetProperty("value", out var value) || value.ValueKind != JsonValueKind.String) + { + return null; + } + + return value.GetString(); + } + + private static (Uri? DownloadUrl, ulong Size) ReadAsset(JsonElement fields, string fieldName) + { + if (fields.ValueKind != JsonValueKind.Object) + { + return (null, 0); + } + + if (!fields.TryGetProperty(fieldName, out var field) || field.ValueKind != JsonValueKind.Object) + { + return (null, 0); + } + + if (!field.TryGetProperty("value", out var value) || value.ValueKind != JsonValueKind.Object) + { + return (null, 0); + } + + Uri? downloadUrl = null; + if (value.TryGetProperty("downloadURL", out var urlElement) && urlElement.ValueKind == JsonValueKind.String) + { + var raw = urlElement.GetString(); + if (!string.IsNullOrWhiteSpace(raw) && Uri.TryCreate(raw, UriKind.Absolute, out var parsed)) + { + downloadUrl = parsed; + } + } + + ulong size = 0; + if (value.TryGetProperty("size", out var sizeElement)) + { + if (sizeElement.ValueKind == JsonValueKind.Number && sizeElement.TryGetUInt64(out var parsed)) + { + size = parsed; + } + else if (sizeElement.ValueKind == JsonValueKind.String && ulong.TryParse(sizeElement.GetString(), out parsed)) + { + size = parsed; + } + } + + return (downloadUrl, size); + } + + private static bool TryGetRecordErrorCode(JsonElement response, out string errorCode) + { + errorCode = string.Empty; + + if (!response.TryGetProperty("errors", out var errors) || errors.ValueKind != JsonValueKind.Array) + { + return false; + } + + var first = errors.EnumerateArray().FirstOrDefault(); + if (first.ValueKind != JsonValueKind.Object) + { + return false; + } + + if (first.TryGetProperty("serverErrorCode", out var codeElement) && 
codeElement.ValueKind == JsonValueKind.String) + { + errorCode = codeElement.GetString() ?? string.Empty; + return !string.IsNullOrWhiteSpace(errorCode); + } + + return false; + } + + public void Dispose() + { + _signingKey?.Dispose(); + if (_ownsHttpClient) + { + _httpClient.Dispose(); + } + } + + private sealed class ResponseDisposingStream : Stream + { + private readonly Stream _inner; + private readonly HttpResponseMessage _response; + + public ResponseDisposingStream(Stream inner, HttpResponseMessage response) + { + _inner = inner; + _response = response; + } + + public override bool CanRead => _inner.CanRead; + public override bool CanSeek => _inner.CanSeek; + public override bool CanWrite => _inner.CanWrite; + public override long Length => _inner.Length; + + public override long Position + { + get => _inner.Position; + set => _inner.Position = value; + } + + public override void Flush() => _inner.Flush(); + + public override int Read(byte[] buffer, int offset, int count) => _inner.Read(buffer, offset, count); + + public override long Seek(long offset, SeekOrigin origin) => _inner.Seek(offset, origin); + + public override void SetLength(long value) => _inner.SetLength(value); + + public override void Write(byte[] buffer, int offset, int count) => _inner.Write(buffer, offset, count); + + public override async ValueTask ReadAsync(Memory buffer, CancellationToken cancellationToken = default) + { + return await _inner.ReadAsync(buffer, cancellationToken); + } + + public override async Task CopyToAsync(Stream destination, int bufferSize, CancellationToken cancellationToken) + { + await _inner.CopyToAsync(destination, bufferSize, cancellationToken); + } + + protected override void Dispose(bool disposing) + { + if (disposing) + { + _inner.Dispose(); + _response.Dispose(); + } + + base.Dispose(disposing); + } + + public override async ValueTask DisposeAsync() + { + await _inner.DisposeAsync(); + _response.Dispose(); + await base.DisposeAsync(); + } + } +} diff --git 
a/Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitRecord.cs b/Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitRecord.cs new file mode 100644 index 0000000..8ad6046 --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitRecord.cs @@ -0,0 +1,14 @@ +using System; + +namespace ManagedCode.Storage.CloudKit.Clients; + +public sealed record CloudKitRecord( + string RecordName, + string RecordType, + string Path, + DateTimeOffset CreatedOn, + DateTimeOffset LastModified, + string? ContentType, + ulong Size, + Uri? DownloadUrl); + diff --git a/Storages/ManagedCode.Storage.CloudKit/Clients/ICloudKitClient.cs b/Storages/ManagedCode.Storage.CloudKit/Clients/ICloudKitClient.cs new file mode 100644 index 0000000..6134bb1 --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/Clients/ICloudKitClient.cs @@ -0,0 +1,22 @@ +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace ManagedCode.Storage.CloudKit.Clients; + +public interface ICloudKitClient +{ + Task UploadAsync(string recordName, string internalPath, Stream content, string contentType, CancellationToken cancellationToken); + + Task DownloadAsync(string recordName, CancellationToken cancellationToken); + + Task DeleteAsync(string recordName, CancellationToken cancellationToken); + + Task ExistsAsync(string recordName, CancellationToken cancellationToken); + + Task GetRecordAsync(string recordName, CancellationToken cancellationToken); + + IAsyncEnumerable QueryByPathPrefixAsync(string pathPrefix, CancellationToken cancellationToken); +} + diff --git a/Storages/ManagedCode.Storage.CloudKit/CloudKitStorage.cs b/Storages/ManagedCode.Storage.CloudKit/CloudKitStorage.cs new file mode 100644 index 0000000..6d8267f --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/CloudKitStorage.cs @@ -0,0 +1,279 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Runtime.CompilerServices; +using 
System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using ManagedCode.Communication; +using ManagedCode.MimeTypes; +using ManagedCode.Storage.CloudKit.Clients; +using ManagedCode.Storage.CloudKit.Options; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Models; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.CloudKit; + +public class CloudKitStorage : BaseStorage, ICloudKitStorage +{ + private readonly ILogger? _logger; + + public CloudKitStorage(CloudKitStorageOptions storageOptions, ILogger? logger = null) : base(storageOptions) + { + _logger = logger; + } + + protected override ICloudKitClient CreateStorageClient() + { + if (StorageOptions.Client != null) + { + return StorageOptions.Client; + } + + return new CloudKitClient(StorageOptions, StorageOptions.HttpClient); + } + + protected override Task CreateContainerInternalAsync(CancellationToken cancellationToken = default) + { + IsContainerCreated = true; + return Task.FromResult(Result.Succeed()); + } + + public override Task RemoveContainerAsync(CancellationToken cancellationToken = default) + { + return Task.FromResult(Result.Fail(new NotSupportedException("Deleting a CloudKit container is not supported."))); + } + + protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var prefix = BuildDirectoryPrefix(directory); + + await foreach (var record in StorageClient.QueryByPathPrefixAsync(prefix, cancellationToken)) + { + cancellationToken.ThrowIfCancellationRequested(); + _ = await StorageClient.DeleteAsync(record.RecordName, cancellationToken); + } + + return Result.Succeed(); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> UploadInternalAsync(Stream stream, UploadOptions options, CancellationToken 
cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + cancellationToken.ThrowIfCancellationRequested(); + + var fullName = NormalizeRelativePath(options.FullPath); + var internalPath = BuildInternalPath(fullName); + var recordName = CreateRecordName(internalPath); + + var record = await StorageClient.UploadAsync(recordName, internalPath, stream, options.MimeType ?? MimeHelper.GetMimeType(options.FileName), cancellationToken); + return Result.Succeed(ToBlobMetadata(record, StripRoot(record.Path))); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> DownloadInternalAsync(LocalFile localFile, DownloadOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var recordName = CreateRecordName(BuildInternalPath(NormalizeRelativePath(options.FullPath))); + + var remoteStream = await StorageClient.DownloadAsync(recordName, cancellationToken); + cancellationToken.ThrowIfCancellationRequested(); + + await using (remoteStream) + await using (var fileStream = localFile.FileStream) + { + await remoteStream.CopyToAsync(fileStream, cancellationToken); + fileStream.Position = 0; + } + + return Result.Succeed(localFile); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> DeleteInternalAsync(DeleteOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var recordName = CreateRecordName(BuildInternalPath(NormalizeRelativePath(options.FullPath))); + var deleted = await StorageClient.DeleteAsync(recordName, cancellationToken); + return Result.Succeed(deleted); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> ExistsInternalAsync(ExistOptions options, CancellationToken 
cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var recordName = CreateRecordName(BuildInternalPath(NormalizeRelativePath(options.FullPath))); + var exists = await StorageClient.ExistsAsync(recordName, cancellationToken); + return Result.Succeed(exists); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override async Task> GetBlobMetadataInternalAsync(MetadataOptions options, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var fullName = NormalizeRelativePath(options.FullPath); + var recordName = CreateRecordName(BuildInternalPath(fullName)); + var record = await StorageClient.GetRecordAsync(recordName, cancellationToken); + if (record == null) + { + return Result.Fail(new FileNotFoundException($"CloudKit record '{fullName}' not found.")); + } + + return Result.Succeed(ToBlobMetadata(record, StripRoot(record.Path))); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + public override async IAsyncEnumerable GetBlobMetadataListAsync(string? 
directory = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + await EnsureContainerExist(cancellationToken); + var prefix = BuildDirectoryPrefix(directory); + + await foreach (var record in StorageClient.QueryByPathPrefixAsync(prefix, cancellationToken)) + { + cancellationToken.ThrowIfCancellationRequested(); + yield return ToBlobMetadata(record, StripRoot(record.Path)); + } + } + + public override async Task> GetStreamAsync(string fileName, CancellationToken cancellationToken = default) + { + try + { + await EnsureContainerExist(cancellationToken); + var recordName = CreateRecordName(BuildInternalPath(NormalizeRelativePath(fileName))); + var stream = await StorageClient.DownloadAsync(recordName, cancellationToken); + return Result.Succeed(stream); + } + catch (Exception ex) + { + _logger.LogException(ex); + return Result.Fail(ex); + } + } + + protected override Task SetLegalHoldInternalAsync(bool hasLegalHold, LegalHoldOptions options, CancellationToken cancellationToken = default) + { + return Task.FromResult(Result.Succeed()); + } + + protected override Task> HasLegalHoldInternalAsync(LegalHoldOptions options, CancellationToken cancellationToken = default) + { + return Task.FromResult(Result.Succeed(false)); + } + + private string BuildDirectoryPrefix(string? directory) + { + var root = NormalizeRelativePath(StorageOptions.RootPath); + if (string.IsNullOrWhiteSpace(directory)) + { + return string.IsNullOrWhiteSpace(root) ? string.Empty : root.TrimEnd('/') + "/"; + } + + var dir = NormalizeRelativePath(directory); + var combined = string.IsNullOrWhiteSpace(root) ? dir : $"{root}/{dir}"; + return combined.TrimEnd('/') + "/"; + } + + private string BuildInternalPath(string relativeFullName) + { + var root = NormalizeRelativePath(StorageOptions.RootPath); + var normalized = NormalizeRelativePath(relativeFullName); + return string.IsNullOrWhiteSpace(root) + ? normalized + : string.IsNullOrWhiteSpace(normalized) ? 
root : $"{root}/{normalized}"; + } + + private string StripRoot(string internalPath) + { + var root = NormalizeRelativePath(StorageOptions.RootPath); + var normalized = NormalizeRelativePath(internalPath); + if (string.IsNullOrWhiteSpace(root)) + { + return normalized; + } + + if (normalized.Equals(root, StringComparison.OrdinalIgnoreCase)) + { + return string.Empty; + } + + var prefix = root.TrimEnd('/') + "/"; + return normalized.StartsWith(prefix, StringComparison.OrdinalIgnoreCase) + ? normalized[prefix.Length..] + : normalized; + } + + private static string NormalizeRelativePath(string path) + { + return path.Replace("\\", "/").Trim('/'); + } + + private static string CreateRecordName(string internalPath) + { + var bytes = Encoding.UTF8.GetBytes(internalPath); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private BlobMetadata ToBlobMetadata(CloudKitRecord record, string fullName) + { + return new BlobMetadata + { + Name = Path.GetFileName(fullName), + FullName = fullName, + Container = StorageOptions.ContainerId, + Uri = record.DownloadUrl, + CreatedOn = record.CreatedOn, + LastModified = record.LastModified, + Length = record.Size, + MimeType = string.IsNullOrWhiteSpace(record.ContentType) ? 
MimeHelper.GetMimeType(fullName) : record.ContentType + }; + } +} + diff --git a/Storages/ManagedCode.Storage.CloudKit/CloudKitStorageProvider.cs b/Storages/ManagedCode.Storage.CloudKit/CloudKitStorageProvider.cs new file mode 100644 index 0000000..01e2236 --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/CloudKitStorageProvider.cs @@ -0,0 +1,49 @@ +using System; +using ManagedCode.Storage.CloudKit.Options; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Providers; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.CloudKit; + +public class CloudKitStorageProvider(IServiceProvider serviceProvider, CloudKitStorageOptions defaultOptions) : IStorageProvider +{ + public Type StorageOptionsType => typeof(CloudKitStorageOptions); + + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage + where TOptions : class, IStorageOptions + { + if (options is not CloudKitStorageOptions cloudKitOptions) + { + throw new ArgumentException($"Options must be of type {typeof(CloudKitStorageOptions)}", nameof(options)); + } + + var logger = serviceProvider.GetService(typeof(ILogger)) as ILogger; + var storage = new CloudKitStorage(cloudKitOptions, logger); + return storage as TStorage ?? 
throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); + } + + public IStorageOptions GetDefaultOptions() + { + return new CloudKitStorageOptions + { + ContainerId = defaultOptions.ContainerId, + Environment = defaultOptions.Environment, + Database = defaultOptions.Database, + RootPath = defaultOptions.RootPath, + RecordType = defaultOptions.RecordType, + PathFieldName = defaultOptions.PathFieldName, + AssetFieldName = defaultOptions.AssetFieldName, + ContentTypeFieldName = defaultOptions.ContentTypeFieldName, + ApiToken = defaultOptions.ApiToken, + WebAuthToken = defaultOptions.WebAuthToken, + ServerToServerKeyId = defaultOptions.ServerToServerKeyId, + ServerToServerPrivateKeyPem = defaultOptions.ServerToServerPrivateKeyPem, + HttpClient = defaultOptions.HttpClient, + Client = defaultOptions.Client, + CreateContainerIfNotExists = defaultOptions.CreateContainerIfNotExists + }; + } +} + diff --git a/Storages/ManagedCode.Storage.CloudKit/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.CloudKit/Extensions/ServiceCollectionExtensions.cs new file mode 100644 index 0000000..0a5feaa --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/Extensions/ServiceCollectionExtensions.cs @@ -0,0 +1,130 @@ +using System; +using ManagedCode.Storage.CloudKit.Options; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Exceptions; +using ManagedCode.Storage.Core.Providers; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.CloudKit.Extensions; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddCloudKitStorage(this IServiceCollection serviceCollection, Action action) + { + var options = new CloudKitStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + return serviceCollection.AddCloudKitStorage(options); + } + + public static IServiceCollection AddCloudKitStorageAsDefault(this 
IServiceCollection serviceCollection, Action action) + { + var options = new CloudKitStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + return serviceCollection.AddCloudKitStorageAsDefault(options); + } + + public static IServiceCollection AddCloudKitStorage(this IServiceCollection serviceCollection, CloudKitStorageOptions options) + { + CheckConfiguration(options); + serviceCollection.AddSingleton(options); + serviceCollection.AddSingleton(); + return serviceCollection.AddSingleton(sp => new CloudKitStorage(options, sp.GetService>())); + } + + public static IServiceCollection AddCloudKitStorageAsDefault(this IServiceCollection serviceCollection, CloudKitStorageOptions options) + { + CheckConfiguration(options); + serviceCollection.AddSingleton(options); + serviceCollection.AddSingleton(); + serviceCollection.AddSingleton(sp => new CloudKitStorage(options, sp.GetService>())); + return serviceCollection.AddSingleton(sp => sp.GetRequiredService()); + } + + public static IServiceCollection AddCloudKitStorage(this IServiceCollection serviceCollection, string key, Action action) + { + var options = new CloudKitStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + serviceCollection.AddKeyedSingleton(key, options); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + { + var opts = sp.GetRequiredKeyedService(k); + return new CloudKitStorage(opts, sp.GetService>()); + }); + + return serviceCollection; + } + + public static IServiceCollection AddCloudKitStorageAsDefault(this IServiceCollection serviceCollection, string key, Action action) + { + var options = new CloudKitStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + serviceCollection.AddKeyedSingleton(key, options); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + { + var opts = sp.GetRequiredKeyedService(k); + return new CloudKitStorage(opts, sp.GetService>()); + }); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + 
sp.GetRequiredKeyedService(k)); + + return serviceCollection; + } + + private static void CheckConfiguration(CloudKitStorageOptions options) + { + if (string.IsNullOrWhiteSpace(options.ContainerId)) + { + throw new BadConfigurationException($"{nameof(options.ContainerId)} cannot be empty."); + } + + if (string.IsNullOrWhiteSpace(options.RecordType)) + { + throw new BadConfigurationException($"{nameof(options.RecordType)} cannot be empty."); + } + + if (string.IsNullOrWhiteSpace(options.PathFieldName) || string.IsNullOrWhiteSpace(options.AssetFieldName)) + { + throw new BadConfigurationException("CloudKit storage requires configured field names."); + } + + var hasApiToken = !string.IsNullOrWhiteSpace(options.ApiToken); + var hasServerKey = !string.IsNullOrWhiteSpace(options.ServerToServerKeyId) || !string.IsNullOrWhiteSpace(options.ServerToServerPrivateKeyPem); + + if (hasApiToken && hasServerKey) + { + throw new BadConfigurationException("CloudKit storage must use either API token authentication or server-to-server signing (not both)."); + } + + if (!hasApiToken && options.Client == null) + { + if (string.IsNullOrWhiteSpace(options.ServerToServerKeyId) || string.IsNullOrWhiteSpace(options.ServerToServerPrivateKeyPem)) + { + throw new BadConfigurationException("CloudKit storage requires either an API token (ckAPIToken) or a server-to-server key (key id + private key PEM), unless a custom ICloudKitClient is supplied."); + } + + if (options.Database != CloudKitDatabase.Public) + { + throw new BadConfigurationException("Server-to-server keys are supported only for the public database."); + } + } + + if (!string.IsNullOrWhiteSpace(options.WebAuthToken) && !hasApiToken) + { + throw new BadConfigurationException($"{nameof(options.WebAuthToken)} requires {nameof(options.ApiToken)}."); + } + } +} + diff --git a/Storages/ManagedCode.Storage.CloudKit/ICloudKitStorage.cs b/Storages/ManagedCode.Storage.CloudKit/ICloudKitStorage.cs new file mode 100644 index 0000000..ad28032 --- 
/dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/ICloudKitStorage.cs @@ -0,0 +1,10 @@ +using ManagedCode.Storage.Core; +using ManagedCode.Storage.CloudKit.Clients; +using ManagedCode.Storage.CloudKit.Options; + +namespace ManagedCode.Storage.CloudKit; + +public interface ICloudKitStorage : IStorage +{ +} + diff --git a/Storages/ManagedCode.Storage.CloudKit/ManagedCode.Storage.CloudKit.csproj b/Storages/ManagedCode.Storage.CloudKit/ManagedCode.Storage.CloudKit.csproj new file mode 100644 index 0000000..5336b3d --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/ManagedCode.Storage.CloudKit.csproj @@ -0,0 +1,20 @@ + + + true + + + ManagedCode.Storage.CloudKit + ManagedCode.Storage.CloudKit + Apple CloudKit (iCloud app data) provider for ManagedCode.Storage. + managedcode, storage, cloudkit, icloud + + + + + + + + + + + diff --git a/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitDatabase.cs b/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitDatabase.cs new file mode 100644 index 0000000..bced443 --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitDatabase.cs @@ -0,0 +1,9 @@ +namespace ManagedCode.Storage.CloudKit.Options; + +public enum CloudKitDatabase +{ + Public, + Private, + Shared +} + diff --git a/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitEnvironment.cs b/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitEnvironment.cs new file mode 100644 index 0000000..c42d3e8 --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitEnvironment.cs @@ -0,0 +1,8 @@ +namespace ManagedCode.Storage.CloudKit.Options; + +public enum CloudKitEnvironment +{ + Development, + Production +} + diff --git a/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs b/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs new file mode 100644 index 0000000..63bd678 --- /dev/null +++ b/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs @@ -0,0 +1,66 @@ +using 
System.Net.Http; +using ManagedCode.Storage.CloudKit.Clients; +using ManagedCode.Storage.Core; + +namespace ManagedCode.Storage.CloudKit.Options; + +public class CloudKitStorageOptions : IStorageOptions +{ + public bool CreateContainerIfNotExists { get; set; } + + /// + /// CloudKit container identifier, e.g. iCloud.com.company.app. + /// + public string ContainerId { get; set; } = string.Empty; + + public CloudKitEnvironment Environment { get; set; } = CloudKitEnvironment.Development; + + public CloudKitDatabase Database { get; set; } = CloudKitDatabase.Public; + + /// + /// Optional prefix applied to all blob paths (like a virtual folder). + /// + public string RootPath { get; set; } = string.Empty; + + /// + /// CloudKit record type that stores files. + /// + public string RecordType { get; set; } = "MCStorageFile"; + + public string PathFieldName { get; set; } = "path"; + + public string AssetFieldName { get; set; } = "file"; + + public string ContentTypeFieldName { get; set; } = "contentType"; + + /// + /// API token authentication (ckAPIToken) for CloudKit Web Services. + /// + public string? ApiToken { get; set; } + + /// + /// Optional user authentication token (ckWebAuthToken) for private database access. + /// + public string? WebAuthToken { get; set; } + + /// + /// Server-to-server key id for signed requests (X-Apple-CloudKit-Request-KeyID). + /// + public string? ServerToServerKeyId { get; set; } + + /// + /// Server-to-server private key in PEM (PKCS8) format. + /// + public string? ServerToServerPrivateKeyPem { get; set; } + + /// + /// Optional custom HttpClient used for CloudKit Web Services requests. + /// + public HttpClient? HttpClient { get; set; } + + /// + /// Optional custom CloudKit client (useful for tests). + /// + public ICloudKitClient? 
Client { get; set; } +} + diff --git a/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs index 0bcab55..1c0c5bd 100644 --- a/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs +++ b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs @@ -27,17 +27,24 @@ public async Task EnsureRootAsync(string rootPath, bool createIfNotExists, Cance } var normalized = Normalize(rootPath); + if (normalized == "/") + { + return; + } + try { + cancellationToken.ThrowIfCancellationRequested(); await _client.Files.GetMetadataAsync(normalized); } catch (ApiException ex) when (ex.ErrorResponse.IsPath && ex.ErrorResponse.AsPath.Value.IsNotFound) { if (!createIfNotExists) { - return; + throw new DirectoryNotFoundException($"Dropbox folder '{normalized}' is missing."); } + cancellationToken.ThrowIfCancellationRequested(); await _client.Files.CreateFolderV2Async(normalized, autorename: false); } } @@ -45,23 +52,33 @@ public async Task EnsureRootAsync(string rootPath, bool createIfNotExists, Cance public async Task UploadAsync(string rootPath, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) { var fullPath = Combine(rootPath, path); + cancellationToken.ThrowIfCancellationRequested(); var uploaded = await _client.Files.UploadAsync(fullPath, WriteMode.Overwrite.Instance, body: content); - var metadata = (await _client.Files.GetMetadataAsync(uploaded.PathLower)).AsFile; - return ToItem(metadata); + return ToItem(uploaded); } public async Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken) { var fullPath = Combine(rootPath, path); + cancellationToken.ThrowIfCancellationRequested(); var response = await _client.Files.DownloadAsync(fullPath); + cancellationToken.ThrowIfCancellationRequested(); return await response.GetContentAsStreamAsync(); } public async Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken) { var fullPath = Combine(rootPath, path); - await _client.Files.DeleteV2Async(fullPath); - return true; + try + { + cancellationToken.ThrowIfCancellationRequested(); + await _client.Files.DeleteV2Async(fullPath); + return true; + } + catch (ApiException ex) when (ex.ErrorResponse.IsPathLookup && ex.ErrorResponse.AsPathLookup.Value.IsNotFound) + { + return false; + } } public async Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken) @@ -69,6 +86,7 @@ public async Task ExistsAsync(string rootPath, string path, CancellationTo var fullPath = Combine(rootPath, path); try { + cancellationToken.ThrowIfCancellationRequested(); await _client.Files.GetMetadataAsync(fullPath); return true; } @@ -83,6 +101,7 @@ public async Task ExistsAsync(string rootPath, string path, CancellationTo var fullPath = Combine(rootPath, path); try { + cancellationToken.ThrowIfCancellationRequested(); var metadata = await _client.Files.GetMetadataAsync(fullPath); return metadata.IsFile ? 
ToItem(metadata.AsFile) : null; } @@ -95,24 +114,22 @@ public async Task ExistsAsync(string rootPath, string path, CancellationTo public async IAsyncEnumerable ListAsync(string rootPath, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) { var fullPath = Combine(rootPath, directory ?? string.Empty); + cancellationToken.ThrowIfCancellationRequested(); var list = await _client.Files.ListFolderAsync(fullPath); - foreach (var item in list.Entries) + foreach (var item in list.Entries.Where(item => item.IsFile)) { - if (item.IsFile) - { - yield return ToItem(item.AsFile); - } + cancellationToken.ThrowIfCancellationRequested(); + yield return ToItem(item.AsFile); } while (list.HasMore) { + cancellationToken.ThrowIfCancellationRequested(); list = await _client.Files.ListFolderContinueAsync(list.Cursor); - foreach (var item in list.Entries) + foreach (var item in list.Entries.Where(item => item.IsFile)) { - if (item.IsFile) - { - yield return ToItem(item.AsFile); - } + cancellationToken.ThrowIfCancellationRequested(); + yield return ToItem(item.AsFile); } } } diff --git a/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs b/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs index 55db4e5..4f8393b 100644 --- a/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs +++ b/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs @@ -6,6 +6,7 @@ using System.Threading; using System.Threading.Tasks; using ManagedCode.Communication; +using ManagedCode.MimeTypes; using ManagedCode.Storage.Core; using ManagedCode.Storage.Core.Models; using ManagedCode.Storage.Dropbox.Clients; @@ -66,10 +67,15 @@ protected override async Task DeleteDirectoryInternalAsync(string direct await EnsureContainerExist(cancellationToken); var normalizedDirectory = NormalizeRelativePath(directory); - await foreach (var item in StorageClient.ListAsync(StorageOptions.RootPath, normalizedDirectory, cancellationToken)) + if (!string.IsNullOrWhiteSpace(normalizedDirectory)) { - 
var path = string.IsNullOrWhiteSpace(normalizedDirectory) ? item.Name : $"{normalizedDirectory}/{item.Name}"; - await StorageClient.DeleteAsync(StorageOptions.RootPath, path!, cancellationToken); + _ = await StorageClient.DeleteAsync(StorageOptions.RootPath, normalizedDirectory, cancellationToken); + return Result.Succeed(); + } + + await foreach (var item in StorageClient.ListAsync(StorageOptions.RootPath, null, cancellationToken)) + { + _ = await StorageClient.DeleteAsync(StorageOptions.RootPath, item.Name, cancellationToken); } return Result.Succeed(); @@ -214,10 +220,7 @@ protected override Task> HasLegalHoldInternalAsync(LegalHoldOptions private string BuildFullPath(string? relativePath) { - var normalized = NormalizeRelativePath(relativePath ?? string.Empty); - return string.IsNullOrWhiteSpace(StorageOptions.RootPath) - ? normalized - : string.IsNullOrWhiteSpace(normalized) ? StorageOptions.RootPath.Trim('/') : $"{StorageOptions.RootPath.Trim('/')}/{normalized}"; + return NormalizeRelativePath(relativePath ?? 
string.Empty); } private static string NormalizeRelativePath(string path) @@ -236,7 +239,7 @@ private BlobMetadata ToBlobMetadata(DropboxItemMetadata file, string fullName) CreatedOn = file.ClientModified, LastModified = file.ServerModified, Length = file.Size, - MimeType = file.Name + MimeType = MimeHelper.GetMimeType(file.Name) }; } } diff --git a/Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs new file mode 100644 index 0000000..476b077 --- /dev/null +++ b/Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs @@ -0,0 +1,93 @@ +using System; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Exceptions; +using ManagedCode.Storage.Core.Providers; +using ManagedCode.Storage.Dropbox.Options; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.Dropbox.Extensions; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddDropboxStorage(this IServiceCollection serviceCollection, Action action) + { + var options = new DropboxStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + return serviceCollection.AddDropboxStorage(options); + } + + public static IServiceCollection AddDropboxStorageAsDefault(this IServiceCollection serviceCollection, Action action) + { + var options = new DropboxStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + return serviceCollection.AddDropboxStorageAsDefault(options); + } + + public static IServiceCollection AddDropboxStorage(this IServiceCollection serviceCollection, DropboxStorageOptions options) + { + CheckConfiguration(options); + serviceCollection.AddSingleton(options); + serviceCollection.AddSingleton(); + return serviceCollection.AddSingleton(sp => new DropboxStorage(options, sp.GetService>())); + } + + public static 
IServiceCollection AddDropboxStorageAsDefault(this IServiceCollection serviceCollection, DropboxStorageOptions options) + { + CheckConfiguration(options); + serviceCollection.AddSingleton(options); + serviceCollection.AddSingleton(); + serviceCollection.AddSingleton(sp => new DropboxStorage(options, sp.GetService>())); + return serviceCollection.AddSingleton(sp => sp.GetRequiredService()); + } + + public static IServiceCollection AddDropboxStorage(this IServiceCollection serviceCollection, string key, Action action) + { + var options = new DropboxStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + serviceCollection.AddKeyedSingleton(key, options); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + { + var opts = sp.GetRequiredKeyedService(k); + return new DropboxStorage(opts, sp.GetService>()); + }); + + return serviceCollection; + } + + public static IServiceCollection AddDropboxStorageAsDefault(this IServiceCollection serviceCollection, string key, Action action) + { + var options = new DropboxStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + serviceCollection.AddKeyedSingleton(key, options); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + { + var opts = sp.GetRequiredKeyedService(k); + return new DropboxStorage(opts, sp.GetService>()); + }); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + sp.GetRequiredKeyedService(k)); + + return serviceCollection; + } + + private static void CheckConfiguration(DropboxStorageOptions options) + { + if (options.Client == null && options.DropboxClient == null) + { + throw new BadConfigurationException("Dropbox storage requires either a configured DropboxClient or a custom IDropboxClientWrapper."); + } + } +} diff --git a/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj b/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj index 349d40d..0755df2 100644 --- 
a/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj +++ b/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj @@ -13,7 +13,7 @@ - - + + diff --git a/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs b/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs index 2dacdc4..150cc5f 100644 --- a/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs +++ b/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs @@ -5,6 +5,7 @@ using System.Runtime.CompilerServices; using System.Threading; using System.Threading.Tasks; +using System.IO.Pipelines; using Google.Apis.Drive.v3; using DriveFile = Google.Apis.Drive.v3.Data.File; @@ -12,6 +13,7 @@ namespace ManagedCode.Storage.GoogleDrive.Clients; public class GoogleDriveClient : IGoogleDriveClient { + private const string FolderMimeType = "application/vnd.google-apps.folder"; private readonly DriveService _driveService; public GoogleDriveClient(DriveService driveService) @@ -37,16 +39,29 @@ public async Task UploadAsync(string rootFolderId, string path, Strea var request = _driveService.Files.Create(fileMetadata, content, contentType ?? "application/octet-stream"); request.Fields = "id,name,parents,createdTime,modifiedTime,md5Checksum,size"; - return await request.UploadAsync(cancellationToken).ContinueWith(async _ => await _driveService.Files.Get(request.ResponseBody.Id).ExecuteAsync(cancellationToken)).Unwrap(); + await request.UploadAsync(cancellationToken); + return request.ResponseBody ?? throw new InvalidOperationException("Google Drive upload returned no metadata."); } public async Task DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken) { var file = await FindFileByPathAsync(rootFolderId, path, cancellationToken) ?? 
throw new FileNotFoundException(path); - var stream = new MemoryStream(); - await _driveService.Files.Get(file.Id).DownloadAsync(stream, cancellationToken); - stream.Position = 0; - return stream; + var pipe = new Pipe(); + _ = Task.Run(async () => + { + try + { + await using var destination = pipe.Writer.AsStream(leaveOpen: true); + await _driveService.Files.Get(file.Id).DownloadAsync(destination, cancellationToken); + pipe.Writer.Complete(); + } + catch (Exception ex) + { + pipe.Writer.Complete(ex); + } + }, cancellationToken); + + return pipe.Reader.AsStream(); } public async Task DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken) @@ -57,7 +72,7 @@ public async Task DeleteAsync(string rootFolderId, string path, Cancellati return false; } - await _driveService.Files.Delete(file.Id).ExecuteAsync(cancellationToken); + await DeleteRecursiveAsync(file.Id, file.MimeType, cancellationToken); return true; } @@ -73,24 +88,35 @@ public async Task ExistsAsync(string rootFolderId, string path, Cancellati public async IAsyncEnumerable ListAsync(string rootFolderId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) { - var parentId = string.IsNullOrWhiteSpace(directory) - ? rootFolderId - : await EnsureFolderPathAsync(rootFolderId, directory!, false, cancellationToken) ?? rootFolderId; + string parentId; + if (string.IsNullOrWhiteSpace(directory)) + { + parentId = rootFolderId; + } + else + { + parentId = await EnsureFolderPathAsync(rootFolderId, directory!, false, cancellationToken) ?? 
string.Empty; + if (string.IsNullOrWhiteSpace(parentId)) + { + yield break; + } + } var request = _driveService.Files.List(); request.Q = $"'{parentId}' in parents and trashed=false"; - request.Fields = "files(id,name,parents,createdTime,modifiedTime,md5Checksum,size,mimeType)"; + request.Fields = "nextPageToken,files(id,name,parents,createdTime,modifiedTime,md5Checksum,size,mimeType)"; do { var response = await request.ExecuteAsync(cancellationToken); foreach (var file in response.Files ?? Enumerable.Empty()) { + cancellationToken.ThrowIfCancellationRequested(); yield return file; } request.PageToken = response.NextPageToken; - } while (!string.IsNullOrEmpty(request.PageToken) && !cancellationToken.IsCancellationRequested); + } while (!string.IsNullOrEmpty(request.PageToken)); } private async Task<(string ParentId, string Name)> EnsureParentFolderAsync(string rootFolderId, string fullPath, CancellationToken cancellationToken) @@ -139,6 +165,42 @@ public async IAsyncEnumerable ListAsync(string rootFolderId, string? return response.Files?.FirstOrDefault(); } + private async Task DeleteRecursiveAsync(string fileId, string? mimeType, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (string.Equals(mimeType, FolderMimeType, StringComparison.OrdinalIgnoreCase)) + { + await DeleteFolderChildrenAsync(fileId, cancellationToken); + } + + await _driveService.Files.Delete(fileId).ExecuteAsync(cancellationToken); + } + + private async Task DeleteFolderChildrenAsync(string folderId, CancellationToken cancellationToken) + { + var request = _driveService.Files.List(); + request.Q = $"'{folderId}' in parents and trashed=false"; + request.Fields = "nextPageToken,files(id,mimeType)"; + + do + { + var response = await request.ExecuteAsync(cancellationToken); + foreach (var entry in response.Files ?? 
Enumerable.Empty()) + { + cancellationToken.ThrowIfCancellationRequested(); + if (string.IsNullOrWhiteSpace(entry.Id)) + { + continue; + } + + await DeleteRecursiveAsync(entry.Id, entry.MimeType, cancellationToken); + } + + request.PageToken = response.NextPageToken; + } while (!string.IsNullOrWhiteSpace(request.PageToken)); + } + private async Task FindFileByPathAsync(string rootFolderId, string path, CancellationToken cancellationToken) { var normalizedPath = path.Replace("\\", "/").Trim('/'); diff --git a/Storages/ManagedCode.Storage.GoogleDrive/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.GoogleDrive/Extensions/ServiceCollectionExtensions.cs new file mode 100644 index 0000000..cb7df0e --- /dev/null +++ b/Storages/ManagedCode.Storage.GoogleDrive/Extensions/ServiceCollectionExtensions.cs @@ -0,0 +1,93 @@ +using System; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Exceptions; +using ManagedCode.Storage.Core.Providers; +using ManagedCode.Storage.GoogleDrive.Options; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.GoogleDrive.Extensions; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddGoogleDriveStorage(this IServiceCollection serviceCollection, Action action) + { + var options = new GoogleDriveStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + return serviceCollection.AddGoogleDriveStorage(options); + } + + public static IServiceCollection AddGoogleDriveStorageAsDefault(this IServiceCollection serviceCollection, Action action) + { + var options = new GoogleDriveStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + return serviceCollection.AddGoogleDriveStorageAsDefault(options); + } + + public static IServiceCollection AddGoogleDriveStorage(this IServiceCollection serviceCollection, GoogleDriveStorageOptions options) + { + 
CheckConfiguration(options); + serviceCollection.AddSingleton(options); + serviceCollection.AddSingleton(); + return serviceCollection.AddSingleton(sp => new GoogleDriveStorage(options, sp.GetService>())); + } + + public static IServiceCollection AddGoogleDriveStorageAsDefault(this IServiceCollection serviceCollection, GoogleDriveStorageOptions options) + { + CheckConfiguration(options); + serviceCollection.AddSingleton(options); + serviceCollection.AddSingleton(); + serviceCollection.AddSingleton(sp => new GoogleDriveStorage(options, sp.GetService>())); + return serviceCollection.AddSingleton(sp => sp.GetRequiredService()); + } + + public static IServiceCollection AddGoogleDriveStorage(this IServiceCollection serviceCollection, string key, Action action) + { + var options = new GoogleDriveStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + serviceCollection.AddKeyedSingleton(key, options); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + { + var opts = sp.GetRequiredKeyedService(k); + return new GoogleDriveStorage(opts, sp.GetService>()); + }); + + return serviceCollection; + } + + public static IServiceCollection AddGoogleDriveStorageAsDefault(this IServiceCollection serviceCollection, string key, Action action) + { + var options = new GoogleDriveStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + serviceCollection.AddKeyedSingleton(key, options); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + { + var opts = sp.GetRequiredKeyedService(k); + return new GoogleDriveStorage(opts, sp.GetService>()); + }); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + sp.GetRequiredKeyedService(k)); + + return serviceCollection; + } + + private static void CheckConfiguration(GoogleDriveStorageOptions options) + { + if (options.Client == null && options.DriveService == null) + { + throw new BadConfigurationException("Google Drive storage requires either a configured DriveService or a custom 
IGoogleDriveClient."); + } + } +} diff --git a/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs index 2d6d941..ecad8d5 100644 --- a/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs +++ b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs @@ -67,15 +67,20 @@ protected override async Task DeleteDirectoryInternalAsync(string direct await EnsureContainerExist(cancellationToken); var normalizedDirectory = NormalizeRelativePath(directory); - await foreach (var item in StorageClient.ListAsync(StorageOptions.RootFolderId, normalizedDirectory, cancellationToken)) + if (!string.IsNullOrWhiteSpace(normalizedDirectory)) { - if (item.MimeType == "application/vnd.google-apps.folder") + _ = await StorageClient.DeleteAsync(StorageOptions.RootFolderId, normalizedDirectory, cancellationToken); + return Result.Succeed(); + } + + await foreach (var item in StorageClient.ListAsync(StorageOptions.RootFolderId, null, cancellationToken)) + { + if (string.IsNullOrWhiteSpace(item.Name)) { continue; } - var path = string.IsNullOrWhiteSpace(normalizedDirectory) ? 
item.Name : $"{normalizedDirectory}/{item.Name}"; - await StorageClient.DeleteAsync(StorageOptions.RootFolderId, path!, cancellationToken); + _ = await StorageClient.DeleteAsync(StorageOptions.RootFolderId, item.Name, cancellationToken); } return Result.Succeed(); diff --git a/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj b/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj index e6ada89..a127a37 100644 --- a/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj +++ b/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj @@ -13,7 +13,7 @@ - - + + diff --git a/Storages/ManagedCode.Storage.OneDrive/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.OneDrive/Extensions/ServiceCollectionExtensions.cs new file mode 100644 index 0000000..6ec6b08 --- /dev/null +++ b/Storages/ManagedCode.Storage.OneDrive/Extensions/ServiceCollectionExtensions.cs @@ -0,0 +1,98 @@ +using System; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Exceptions; +using ManagedCode.Storage.Core.Providers; +using ManagedCode.Storage.OneDrive.Options; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace ManagedCode.Storage.OneDrive.Extensions; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddOneDriveStorage(this IServiceCollection serviceCollection, Action action) + { + var options = new OneDriveStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + return serviceCollection.AddOneDriveStorage(options); + } + + public static IServiceCollection AddOneDriveStorageAsDefault(this IServiceCollection serviceCollection, Action action) + { + var options = new OneDriveStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + return serviceCollection.AddOneDriveStorageAsDefault(options); + } + + public static 
IServiceCollection AddOneDriveStorage(this IServiceCollection serviceCollection, OneDriveStorageOptions options) + { + CheckConfiguration(options); + serviceCollection.AddSingleton(options); + serviceCollection.AddSingleton(); + return serviceCollection.AddSingleton(sp => new OneDriveStorage(options, sp.GetService>())); + } + + public static IServiceCollection AddOneDriveStorageAsDefault(this IServiceCollection serviceCollection, OneDriveStorageOptions options) + { + CheckConfiguration(options); + serviceCollection.AddSingleton(options); + serviceCollection.AddSingleton(); + serviceCollection.AddSingleton(sp => new OneDriveStorage(options, sp.GetService>())); + return serviceCollection.AddSingleton(sp => sp.GetRequiredService()); + } + + public static IServiceCollection AddOneDriveStorage(this IServiceCollection serviceCollection, string key, Action action) + { + var options = new OneDriveStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + serviceCollection.AddKeyedSingleton(key, options); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + { + var opts = sp.GetRequiredKeyedService(k); + return new OneDriveStorage(opts, sp.GetService>()); + }); + + return serviceCollection; + } + + public static IServiceCollection AddOneDriveStorageAsDefault(this IServiceCollection serviceCollection, string key, Action action) + { + var options = new OneDriveStorageOptions(); + action.Invoke(options); + + CheckConfiguration(options); + + serviceCollection.AddKeyedSingleton(key, options); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + { + var opts = sp.GetRequiredKeyedService(k); + return new OneDriveStorage(opts, sp.GetService>()); + }); + serviceCollection.AddKeyedSingleton(key, (sp, k) => + sp.GetRequiredKeyedService(k)); + + return serviceCollection; + } + + private static void CheckConfiguration(OneDriveStorageOptions options) + { + if (options.Client == null && options.GraphClient == null) + { + throw new 
BadConfigurationException("OneDrive storage requires either a configured GraphServiceClient or a custom IOneDriveClient."); + } + + if (string.IsNullOrWhiteSpace(options.DriveId)) + { + throw new BadConfigurationException($"{nameof(options.DriveId)} cannot be empty."); + } + } +} diff --git a/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj b/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj index eae4f6d..a74b993 100644 --- a/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj +++ b/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj @@ -13,7 +13,7 @@ - - + + diff --git a/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs index 4f3d59d..46bc8d5 100644 --- a/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs +++ b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs @@ -67,15 +67,23 @@ protected override async Task DeleteDirectoryInternalAsync(string direct await EnsureContainerExist(cancellationToken); var normalizedDirectory = NormalizeRelativePath(directory); - await foreach (var item in StorageClient.ListAsync(StorageOptions.DriveId, normalizedDirectory, cancellationToken)) + if (!string.IsNullOrWhiteSpace(normalizedDirectory)) { - if (item?.Folder != null) + _ = await StorageClient.DeleteAsync(StorageOptions.DriveId, BuildFullPath(normalizedDirectory), cancellationToken); + return Result.Succeed(); + } + + var rootPath = BuildFullPath(string.Empty); + var listPath = string.IsNullOrWhiteSpace(rootPath) ? null : rootPath; + await foreach (var item in StorageClient.ListAsync(StorageOptions.DriveId, listPath, cancellationToken)) + { + if (item?.Name == null) { continue; } - var path = $"{normalizedDirectory}/{item!.Name}".Trim('/'); - await StorageClient.DeleteAsync(StorageOptions.DriveId, path, cancellationToken); + var childPath = string.IsNullOrWhiteSpace(rootPath) ? 
item.Name : $"{rootPath}/{item.Name}".Trim('/'); + _ = await StorageClient.DeleteAsync(StorageOptions.DriveId, childPath, cancellationToken); } return Result.Succeed(); @@ -92,9 +100,10 @@ protected override async Task> UploadInternalAsync(Stream s try { await EnsureContainerExist(cancellationToken); - var path = BuildFullPath(options.FullPath); + var fullName = NormalizeRelativePath(options.FullPath); + var path = BuildFullPath(fullName); var uploaded = await StorageClient.UploadAsync(StorageOptions.DriveId, path, stream, options.MimeType, cancellationToken); - return Result.Succeed(ToBlobMetadata(uploaded, path)); + return Result.Succeed(ToBlobMetadata(uploaded, fullName)); } catch (Exception ex) { @@ -165,11 +174,12 @@ protected override async Task> GetBlobMetadataInternalAsync try { await EnsureContainerExist(cancellationToken); - var path = BuildFullPath(options.FullPath); + var fullName = NormalizeRelativePath(options.FullPath); + var path = BuildFullPath(fullName); var item = await StorageClient.GetMetadataAsync(StorageOptions.DriveId, path, cancellationToken); return item == null ? Result.Fail(new FileNotFoundException($"File '{path}' not found in OneDrive.")) - : Result.Succeed(ToBlobMetadata(item, path)); + : Result.Succeed(ToBlobMetadata(item, fullName)); } catch (Exception ex) { @@ -183,7 +193,9 @@ public override async IAsyncEnumerable GetBlobMetadataListAsync(st await EnsureContainerExist(cancellationToken); var normalizedDirectory = string.IsNullOrWhiteSpace(directory) ? null : NormalizeRelativePath(directory!); - await foreach (var item in StorageClient.ListAsync(StorageOptions.DriveId, normalizedDirectory, cancellationToken)) + var directoryPath = normalizedDirectory == null ? BuildFullPath(string.Empty) : BuildFullPath(normalizedDirectory); + var listPath = string.IsNullOrWhiteSpace(directoryPath) ? 
null : directoryPath; + await foreach (var item in StorageClient.ListAsync(StorageOptions.DriveId, listPath, cancellationToken)) { cancellationToken.ThrowIfCancellationRequested(); if (item == null || item.Folder != null) diff --git a/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj b/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj index e6bad4e..47141dd 100644 --- a/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj +++ b/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj @@ -20,19 +20,19 @@ runtime; build; native; contentfiles; analyzers; buildtransitive - - - + + + - - - - - - - + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive @@ -55,6 +55,7 @@ + diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs new file mode 100644 index 0000000..53d4561 --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs @@ -0,0 +1,214 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Google.Apis.Drive.v3.Data; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Exceptions; +using ManagedCode.Storage.Dropbox; +using ManagedCode.Storage.Dropbox.Clients; +using ManagedCode.Storage.Dropbox.Extensions; +using ManagedCode.Storage.GoogleDrive; +using ManagedCode.Storage.GoogleDrive.Clients; +using ManagedCode.Storage.GoogleDrive.Extensions; +using ManagedCode.Storage.OneDrive; +using ManagedCode.Storage.OneDrive.Clients; +using ManagedCode.Storage.OneDrive.Extensions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Graph.Models; +using Shouldly; +using Xunit; +using DriveFile = Google.Apis.Drive.v3.Data.File; + +namespace ManagedCode.Storage.Tests.Storages.CloudDrive; + +public class 
CloudDriveDependencyInjectionTests +{ + [Fact] + public void Dropbox_AddAsDefault_ShouldResolveIStorage() + { + var services = new ServiceCollection(); + services.AddDropboxStorageAsDefault(options => + { + options.RootPath = "/apps/demo"; + options.Client = new StubDropboxClient(); + options.CreateContainerIfNotExists = true; + }); + + using var provider = services.BuildServiceProvider(); + var storage = provider.GetRequiredService(); + var typed = provider.GetRequiredService(); + + storage.ShouldBeSameAs(typed); + } + + [Fact] + public void Dropbox_AddAsDefault_Keyed_ShouldResolveKeyedIStorage() + { + var services = new ServiceCollection(); + services.AddDropboxStorageAsDefault("tenant-a", options => + { + options.RootPath = "/apps/demo"; + options.Client = new StubDropboxClient(); + }); + + using var provider = services.BuildServiceProvider(); + var storage = provider.GetRequiredKeyedService("tenant-a"); + var typed = provider.GetRequiredKeyedService("tenant-a"); + + storage.ShouldBeSameAs(typed); + } + + [Fact] + public void Dropbox_WhenClientNotConfigured_ShouldThrow() + { + var services = new ServiceCollection(); + Should.Throw(() => + services.AddDropboxStorage(options => options.RootPath = "/apps/demo")); + } + + [Fact] + public void GoogleDrive_AddAsDefault_ShouldResolveIStorage() + { + var services = new ServiceCollection(); + services.AddGoogleDriveStorageAsDefault(options => + { + options.RootFolderId = "root"; + options.Client = new StubGoogleDriveClient(); + }); + + using var provider = services.BuildServiceProvider(); + var storage = provider.GetRequiredService(); + var typed = provider.GetRequiredService(); + + storage.ShouldBeSameAs(typed); + } + + [Fact] + public void GoogleDrive_AddAsDefault_Keyed_ShouldResolveKeyedIStorage() + { + var services = new ServiceCollection(); + services.AddGoogleDriveStorageAsDefault("tenant-a", options => + { + options.RootFolderId = "root"; + options.Client = new StubGoogleDriveClient(); + }); + + using var provider 
= services.BuildServiceProvider(); + var storage = provider.GetRequiredKeyedService("tenant-a"); + var typed = provider.GetRequiredKeyedService("tenant-a"); + + storage.ShouldBeSameAs(typed); + } + + [Fact] + public void GoogleDrive_WhenClientNotConfigured_ShouldThrow() + { + var services = new ServiceCollection(); + Should.Throw(() => + services.AddGoogleDriveStorage(options => options.RootFolderId = "root")); + } + + [Fact] + public void OneDrive_AddAsDefault_ShouldResolveIStorage() + { + var services = new ServiceCollection(); + services.AddOneDriveStorageAsDefault(options => + { + options.DriveId = "me"; + options.RootPath = "demo"; + options.Client = new StubOneDriveClient(); + }); + + using var provider = services.BuildServiceProvider(); + var storage = provider.GetRequiredService(); + var typed = provider.GetRequiredService(); + + storage.ShouldBeSameAs(typed); + } + + [Fact] + public void OneDrive_AddAsDefault_Keyed_ShouldResolveKeyedIStorage() + { + var services = new ServiceCollection(); + services.AddOneDriveStorageAsDefault("tenant-a", options => + { + options.DriveId = "me"; + options.RootPath = "demo"; + options.Client = new StubOneDriveClient(); + }); + + using var provider = services.BuildServiceProvider(); + var storage = provider.GetRequiredKeyedService("tenant-a"); + var typed = provider.GetRequiredKeyedService("tenant-a"); + + storage.ShouldBeSameAs(typed); + } + + [Fact] + public void OneDrive_WhenDriveIdMissing_ShouldThrow() + { + var services = new ServiceCollection(); + Should.Throw(() => + services.AddOneDriveStorage(options => + { + options.Client = new StubOneDriveClient(); + options.DriveId = string.Empty; + })); + } + + private sealed class StubDropboxClient : IDropboxClientWrapper + { + public Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task UploadAsync(string rootPath, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public IAsyncEnumerable ListAsync(string rootPath, string? directory, CancellationToken cancellationToken) => throw new NotImplementedException(); + } + + private sealed class StubGoogleDriveClient : IGoogleDriveClient + { + public Task EnsureRootAsync(string rootFolderId, bool createIfNotExists, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task UploadAsync(string rootFolderId, string path, Stream content, string? contentType, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task ExistsAsync(string rootFolderId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task GetMetadataAsync(string rootFolderId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public IAsyncEnumerable ListAsync(string rootFolderId, string? 
directory, CancellationToken cancellationToken) => throw new NotImplementedException(); + } + + private sealed class StubOneDriveClient : IOneDriveClient + { + public Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task UploadAsync(string driveId, string path, Stream content, string? contentType, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task DownloadAsync(string driveId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task DeleteAsync(string driveId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task ExistsAsync(string driveId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public IAsyncEnumerable ListAsync(string driveId, string? 
directory, CancellationToken cancellationToken) => throw new NotImplementedException(); + } +} diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageProviderTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageProviderTests.cs new file mode 100644 index 0000000..69c789d --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageProviderTests.cs @@ -0,0 +1,184 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Google.Apis.Drive.v3.Data; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Dropbox; +using ManagedCode.Storage.Dropbox.Clients; +using ManagedCode.Storage.Dropbox.Options; +using ManagedCode.Storage.GoogleDrive; +using ManagedCode.Storage.GoogleDrive.Clients; +using ManagedCode.Storage.GoogleDrive.Options; +using ManagedCode.Storage.OneDrive; +using ManagedCode.Storage.OneDrive.Clients; +using ManagedCode.Storage.OneDrive.Options; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Graph.Models; +using Shouldly; +using Xunit; +using DriveFile = Google.Apis.Drive.v3.Data.File; + +namespace ManagedCode.Storage.Tests.Storages.CloudDrive; + +public class CloudDriveStorageProviderTests +{ + [Fact] + public void DropboxStorageProvider_CreateStorage_ShouldUseDropboxOptions() + { + using var serviceProvider = new ServiceCollection().BuildServiceProvider(); + var defaultOptions = new DropboxStorageOptions + { + RootPath = "/apps/demo", + Client = new StubDropboxClient(), + CreateContainerIfNotExists = true + }; + + var provider = new DropboxStorageProvider(serviceProvider, defaultOptions); + provider.StorageOptionsType.ShouldBe(typeof(DropboxStorageOptions)); + + var cloned = provider.GetDefaultOptions().ShouldBeOfType(); + cloned.ShouldNotBeSameAs(defaultOptions); + cloned.RootPath.ShouldBe(defaultOptions.RootPath); + cloned.Client.ShouldBeSameAs(defaultOptions.Client); + 
cloned.CreateContainerIfNotExists.ShouldBe(defaultOptions.CreateContainerIfNotExists); + + var storage = provider.CreateStorage(cloned); + storage.ShouldBeOfType(); + } + + [Fact] + public void DropboxStorageProvider_WhenOptionsWrong_ShouldThrow() + { + using var serviceProvider = new ServiceCollection().BuildServiceProvider(); + var provider = new DropboxStorageProvider(serviceProvider, new DropboxStorageOptions { RootPath = "/apps/demo", Client = new StubDropboxClient() }); + + Should.Throw(() => + provider.CreateStorage(new GoogleDriveStorageOptions { Client = new StubGoogleDriveClient() })); + } + + [Fact] + public void GoogleDriveStorageProvider_CreateStorage_ShouldUseGoogleDriveOptions() + { + using var serviceProvider = new ServiceCollection().BuildServiceProvider(); + var defaultOptions = new GoogleDriveStorageOptions + { + RootFolderId = "root", + Client = new StubGoogleDriveClient(), + CreateContainerIfNotExists = true + }; + + var provider = new GoogleDriveStorageProvider(serviceProvider, defaultOptions); + provider.StorageOptionsType.ShouldBe(typeof(GoogleDriveStorageOptions)); + + var cloned = provider.GetDefaultOptions().ShouldBeOfType(); + cloned.ShouldNotBeSameAs(defaultOptions); + cloned.RootFolderId.ShouldBe(defaultOptions.RootFolderId); + cloned.Client.ShouldBeSameAs(defaultOptions.Client); + cloned.CreateContainerIfNotExists.ShouldBe(defaultOptions.CreateContainerIfNotExists); + + var storage = provider.CreateStorage(cloned); + storage.ShouldBeOfType(); + } + + [Fact] + public void GoogleDriveStorageProvider_WhenOptionsWrong_ShouldThrow() + { + using var serviceProvider = new ServiceCollection().BuildServiceProvider(); + var provider = new GoogleDriveStorageProvider(serviceProvider, new GoogleDriveStorageOptions { RootFolderId = "root", Client = new StubGoogleDriveClient() }); + + Should.Throw(() => + provider.CreateStorage(new OneDriveStorageOptions { DriveId = "me", Client = new StubOneDriveClient() })); + } + + [Fact] + public void 
OneDriveStorageProvider_CreateStorage_ShouldUseOneDriveOptions() + { + using var serviceProvider = new ServiceCollection().BuildServiceProvider(); + var defaultOptions = new OneDriveStorageOptions + { + DriveId = "me", + RootPath = "demo", + Client = new StubOneDriveClient(), + CreateContainerIfNotExists = true + }; + + var provider = new OneDriveStorageProvider(serviceProvider, defaultOptions); + provider.StorageOptionsType.ShouldBe(typeof(OneDriveStorageOptions)); + + var cloned = provider.GetDefaultOptions().ShouldBeOfType(); + cloned.ShouldNotBeSameAs(defaultOptions); + cloned.DriveId.ShouldBe(defaultOptions.DriveId); + cloned.RootPath.ShouldBe(defaultOptions.RootPath); + cloned.Client.ShouldBeSameAs(defaultOptions.Client); + cloned.CreateContainerIfNotExists.ShouldBe(defaultOptions.CreateContainerIfNotExists); + + var storage = provider.CreateStorage(cloned); + storage.ShouldBeOfType(); + } + + [Fact] + public void OneDriveStorageProvider_WhenOptionsWrong_ShouldThrow() + { + using var serviceProvider = new ServiceCollection().BuildServiceProvider(); + var provider = new OneDriveStorageProvider(serviceProvider, new OneDriveStorageOptions { DriveId = "me", RootPath = "demo", Client = new StubOneDriveClient() }); + + Should.Throw(() => + provider.CreateStorage(new DropboxStorageOptions { RootPath = "/apps/demo", Client = new StubDropboxClient() })); + } + + private sealed class StubDropboxClient : IDropboxClientWrapper + { + public Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task UploadAsync(string rootPath, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public IAsyncEnumerable ListAsync(string rootPath, string? directory, CancellationToken cancellationToken) => throw new NotImplementedException(); + } + + private sealed class StubGoogleDriveClient : IGoogleDriveClient + { + public Task EnsureRootAsync(string rootFolderId, bool createIfNotExists, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task UploadAsync(string rootFolderId, string path, Stream content, string? contentType, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task ExistsAsync(string rootFolderId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task GetMetadataAsync(string rootFolderId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public IAsyncEnumerable ListAsync(string rootFolderId, string? 
directory, CancellationToken cancellationToken) => throw new NotImplementedException(); + } + + private sealed class StubOneDriveClient : IOneDriveClient + { + public Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task UploadAsync(string driveId, string path, Stream content, string? contentType, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task DownloadAsync(string driveId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task DeleteAsync(string driveId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task ExistsAsync(string driveId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public IAsyncEnumerable ListAsync(string driveId, string? 
directory, CancellationToken cancellationToken) => throw new NotImplementedException(); + } +} diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs index 520f864..7c00886 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs @@ -37,14 +37,21 @@ public async Task OneDrive_FakeClient_RoundTrip() var uploadResult = await storage.UploadAsync("hello world", options => options.FileName = "text.txt"); uploadResult.IsSuccess.ShouldBeTrue(); + uploadResult.Value.FullName.ShouldBe("text.txt"); + uploadResult.Value.Container.ShouldBe("drive"); var exists = await storage.ExistsAsync("text.txt"); + exists.IsSuccess.ShouldBeTrue(); exists.Value.ShouldBeTrue(); var metadata = await storage.GetBlobMetadataAsync("text.txt"); + metadata.IsSuccess.ShouldBeTrue(); metadata.Value.Name.ShouldBe("text.txt"); + metadata.Value.FullName.ShouldBe("text.txt"); + metadata.Value.Container.ShouldBe("drive"); var download = await storage.DownloadAsync("text.txt"); + download.IsSuccess.ShouldBeTrue(); using var reader = new StreamReader(download.Value.FileStream); (await reader.ReadToEndAsync()).ShouldBe("hello world"); @@ -54,7 +61,7 @@ public async Task OneDrive_FakeClient_RoundTrip() listed.Add(item); } - listed.ShouldContain(m => m.FullName.EndsWith("text.txt")); + listed.ShouldContain(m => m.FullName == "text.txt"); } [Fact] @@ -84,14 +91,20 @@ public async Task GoogleDrive_FakeClient_RoundTrip() var uploadResult = await storage.UploadAsync("drive content", options => options.FileName = "data.bin"); uploadResult.IsSuccess.ShouldBeTrue(); + uploadResult.Value.FullName.ShouldBe("data.bin"); + uploadResult.Value.Container.ShouldBe("root"); var exists = await storage.ExistsAsync("data.bin"); + exists.IsSuccess.ShouldBeTrue(); exists.Value.ShouldBeTrue(); var 
metadata = await storage.GetBlobMetadataAsync("data.bin"); + metadata.IsSuccess.ShouldBeTrue(); metadata.Value.FullName.ShouldBe("data.bin"); + metadata.Value.Container.ShouldBe("root"); var download = await storage.DownloadAsync("data.bin"); + download.IsSuccess.ShouldBeTrue(); using var reader = new StreamReader(download.Value.FileStream); (await reader.ReadToEndAsync()).ShouldBe("drive content"); @@ -101,7 +114,7 @@ public async Task GoogleDrive_FakeClient_RoundTrip() listed.Add(item); } - listed.ShouldContain(m => m.FullName.Contains("data.bin")); + listed.ShouldContain(m => m.FullName == "data.bin"); } [Fact] @@ -116,14 +129,23 @@ public async Task Dropbox_FakeClient_RoundTrip() var uploadResult = await storage.UploadAsync("dropbox payload", options => options.FileName = "file.json"); uploadResult.IsSuccess.ShouldBeTrue(); + uploadResult.Value.FullName.ShouldBe("file.json"); + uploadResult.Value.Container.ShouldBe("/apps/demo"); + uploadResult.Value.MimeType.ShouldBe("application/json"); var exists = await storage.ExistsAsync("file.json"); + exists.IsSuccess.ShouldBeTrue(); exists.Value.ShouldBeTrue(); var metadata = await storage.GetBlobMetadataAsync("file.json"); + metadata.IsSuccess.ShouldBeTrue(); metadata.Value.Name.ShouldBe("file.json"); + metadata.Value.FullName.ShouldBe("file.json"); + metadata.Value.Container.ShouldBe("/apps/demo"); + metadata.Value.MimeType.ShouldBe("application/json"); var download = await storage.DownloadAsync("file.json"); + download.IsSuccess.ShouldBeTrue(); using var reader = new StreamReader(download.Value.FileStream); (await reader.ReadToEndAsync()).ShouldBe("dropbox payload"); @@ -133,7 +155,140 @@ public async Task Dropbox_FakeClient_RoundTrip() listed.Add(item); } - listed.ShouldContain(m => m.FullName.Contains("file.json")); + listed.ShouldContain(m => m.FullName == "file.json"); + } + + [Fact] + public async Task OneDrive_DeleteDirectory_ShouldDeleteOnlyDirectoryContent() + { + var fakeClient = new FakeOneDriveClient(); + 
var storage = new OneDriveStorage(new OneDriveStorageOptions + { + Client = fakeClient, + DriveId = "drive", + RootPath = "root" + }); + + (await storage.UploadAsync("dir-1", options => + { + options.Directory = "dir"; + options.FileName = "a.txt"; + })).IsSuccess.ShouldBeTrue(); + + (await storage.UploadAsync("dir-2", options => + { + options.Directory = "dir"; + options.FileName = "b.txt"; + })).IsSuccess.ShouldBeTrue(); + + (await storage.UploadAsync("keep", options => options.FileName = "keep.txt")).IsSuccess.ShouldBeTrue(); + + var deleteResult = await storage.DeleteDirectoryAsync("dir"); + deleteResult.IsSuccess.ShouldBeTrue(); + + var dirAExists = await storage.ExistsAsync("dir/a.txt"); + dirAExists.IsSuccess.ShouldBeTrue(); + dirAExists.Value.ShouldBeFalse(); + + var keepExists = await storage.ExistsAsync("keep.txt"); + keepExists.IsSuccess.ShouldBeTrue(); + keepExists.Value.ShouldBeTrue(); + + var listed = new List(); + await foreach (var item in storage.GetBlobMetadataListAsync("dir")) + { + listed.Add(item); + } + + listed.ShouldBeEmpty(); + } + + [Fact] + public async Task GoogleDrive_DeleteDirectory_ShouldDeleteOnlyDirectoryContent() + { + var fakeClient = new FakeGoogleDriveClient(); + var storage = new GoogleDriveStorage(new GoogleDriveStorageOptions + { + Client = fakeClient, + RootFolderId = "root" + }); + + (await storage.UploadAsync("dir-1", options => + { + options.Directory = "dir"; + options.FileName = "a.txt"; + })).IsSuccess.ShouldBeTrue(); + + (await storage.UploadAsync("dir-2", options => + { + options.Directory = "dir"; + options.FileName = "b.txt"; + })).IsSuccess.ShouldBeTrue(); + + (await storage.UploadAsync("keep", options => options.FileName = "keep.txt")).IsSuccess.ShouldBeTrue(); + + var deleteResult = await storage.DeleteDirectoryAsync("dir"); + deleteResult.IsSuccess.ShouldBeTrue(); + + var dirAExists = await storage.ExistsAsync("dir/a.txt"); + dirAExists.IsSuccess.ShouldBeTrue(); + dirAExists.Value.ShouldBeFalse(); + + var 
keepExists = await storage.ExistsAsync("keep.txt"); + keepExists.IsSuccess.ShouldBeTrue(); + keepExists.Value.ShouldBeTrue(); + + var listed = new List(); + await foreach (var item in storage.GetBlobMetadataListAsync("dir")) + { + listed.Add(item); + } + + listed.ShouldBeEmpty(); + } + + [Fact] + public async Task Dropbox_DeleteDirectory_ShouldDeleteOnlyDirectoryContent() + { + var fakeClient = new FakeDropboxClient(); + var storage = new DropboxStorage(new DropboxStorageOptions + { + Client = fakeClient, + RootPath = "/apps/demo" + }); + + (await storage.UploadAsync("dir-1", options => + { + options.Directory = "dir"; + options.FileName = "a.txt"; + })).IsSuccess.ShouldBeTrue(); + + (await storage.UploadAsync("dir-2", options => + { + options.Directory = "dir"; + options.FileName = "b.txt"; + })).IsSuccess.ShouldBeTrue(); + + (await storage.UploadAsync("keep", options => options.FileName = "keep.txt")).IsSuccess.ShouldBeTrue(); + + var deleteResult = await storage.DeleteDirectoryAsync("dir"); + deleteResult.IsSuccess.ShouldBeTrue(); + + var dirAExists = await storage.ExistsAsync("dir/a.txt"); + dirAExists.IsSuccess.ShouldBeTrue(); + dirAExists.Value.ShouldBeFalse(); + + var keepExists = await storage.ExistsAsync("keep.txt"); + keepExists.IsSuccess.ShouldBeTrue(); + keepExists.Value.ShouldBeTrue(); + + var listed = new List(); + await foreach (var item in storage.GetBlobMetadataListAsync("dir")) + { + listed.Add(item); + } + + listed.ShouldBeEmpty(); } private class FakeOneDriveClient : IOneDriveClient @@ -238,37 +393,63 @@ public Task EnsureRootAsync(string rootPath, bool createIfNotExists, Cancellatio public Task UploadAsync(string rootPath, string path, Stream content, string? 
contentType, CancellationToken cancellationToken) { - var entry = _drive.Save(path, content, contentType); - return Task.FromResult(entry.ToDropboxFile(path)); + var fullPath = Combine(rootPath, path); + var entry = _drive.Save(fullPath, content, contentType); + return Task.FromResult(entry.ToDropboxFile(fullPath)); } public Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken) { - return Task.FromResult(_drive.Download(path)); + return Task.FromResult(_drive.Download(Combine(rootPath, path))); } public Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken) { - return Task.FromResult(_drive.Delete(path)); + return Task.FromResult(_drive.Delete(Combine(rootPath, path))); } public Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken) { - return Task.FromResult(_drive.Exists(path)); + return Task.FromResult(_drive.Exists(Combine(rootPath, path))); } public Task GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken) { - return Task.FromResult(_drive.Get(path)?.ToDropboxFile(path)); + var fullPath = Combine(rootPath, path); + return Task.FromResult(_drive.Get(fullPath)?.ToDropboxFile(fullPath)); } public async IAsyncEnumerable ListAsync(string rootPath, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken) { - await foreach (var entry in _drive.List(directory, cancellationToken)) + var fullPath = Combine(rootPath, directory ?? string.Empty); + await foreach (var entry in _drive.List(fullPath, cancellationToken)) { yield return entry.ToDropboxFile(entry.Path); } } + + private static string Normalize(string path) + { + var normalized = path.Replace("\\", "/"); + if (!normalized.StartsWith('/')) + { + normalized = "/" + normalized; + } + + return normalized.TrimEnd('/') == string.Empty ? 
"/" : normalized.TrimEnd('/'); + } + + private static string Combine(string root, string path) + { + var normalizedRoot = Normalize(root); + var normalizedPath = path.Replace("\\", "/").Trim('/'); + if (string.IsNullOrWhiteSpace(normalizedPath)) + { + return normalizedRoot; + } + + return normalizedRoot.EndsWith("/") ? normalizedRoot + normalizedPath : normalizedRoot + "/" + normalizedPath; + } } private class InMemoryDrive @@ -297,7 +478,24 @@ public DriveEntry Save(string path, Stream content, string? contentType) public bool Delete(string path) { - return _entries.Remove(Normalize(path)); + var normalized = Normalize(path); + if (string.IsNullOrWhiteSpace(normalized)) + { + var count = _entries.Count; + _entries.Clear(); + return count > 0; + } + + var keys = _entries.Keys + .Where(key => key == normalized || key.StartsWith(normalized + "/")) + .ToList(); + + foreach (var key in keys) + { + _entries.Remove(key); + } + + return keys.Count > 0; } public bool Exists(string path) @@ -327,13 +525,13 @@ public async IAsyncEnumerable List(string? 
directory, [EnumeratorCan foreach (var entry in _entries.Values) { cancellationToken.ThrowIfCancellationRequested(); - if (normalized == null || entry.Path.StartsWith(normalized)) + if (normalized == null + || string.Equals(entry.Path, normalized) + || entry.Path.StartsWith(normalized + "/")) { yield return entry; } } - - await Task.CompletedTask; } private string Normalize(string path) diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs new file mode 100644 index 0000000..07fe94f --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs @@ -0,0 +1,399 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Dropbox.Api; +using ManagedCode.Storage.Dropbox.Clients; +using Shouldly; +using Xunit; + +namespace ManagedCode.Storage.Tests.Storages.CloudDrive; + +public class DropboxClientWrapperHttpTests +{ + [Fact] + public async Task DropboxClientWrapper_WithHttpHandler_RoundTrip() + { + var handler = new FakeDropboxHttpHandler(); + var httpClient = new HttpClient(handler); + var config = new DropboxClientConfig("ManagedCode.Storage.Tests") + { + HttpClient = httpClient + }; + + using var dropboxClient = new DropboxClient("test-token", config); + var wrapper = new DropboxClientWrapper(dropboxClient); + + await wrapper.EnsureRootAsync("/apps/demo", true, CancellationToken.None); + + await using (var uploadStream = new MemoryStream(Encoding.UTF8.GetBytes("dropbox payload"))) + { + var uploaded = await wrapper.UploadAsync("/apps/demo", "file.json", uploadStream, "application/json", CancellationToken.None); + uploaded.Name.ShouldBe("file.json"); + uploaded.Path.ShouldBe("/apps/demo/file.json"); + } + + (await 
wrapper.ExistsAsync("/apps/demo", "file.json", CancellationToken.None)).ShouldBeTrue(); + + await using (var downloaded = await wrapper.DownloadAsync("/apps/demo", "file.json", CancellationToken.None)) + using (var reader = new StreamReader(downloaded, Encoding.UTF8)) + { + (await reader.ReadToEndAsync()).ShouldBe("dropbox payload"); + } + + var items = new List(); + await foreach (var item in wrapper.ListAsync("/apps/demo", null, CancellationToken.None)) + { + items.Add(item); + } + + items.ShouldContain(i => i.Name == "file.json"); + + (await wrapper.DeleteAsync("/apps/demo", "file.json", CancellationToken.None)).ShouldBeTrue(); + (await wrapper.DeleteAsync("/apps/demo", "file.json", CancellationToken.None)).ShouldBeFalse(); + } + + private sealed class FakeDropboxHttpHandler : HttpMessageHandler + { + private const string ApiHost = "api.dropboxapi.com"; + private const string ContentHost = "content.dropboxapi.com"; + + private readonly Dictionary _entries = new(StringComparer.OrdinalIgnoreCase); + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + var host = request.RequestUri?.Host ?? string.Empty; + var path = request.RequestUri?.AbsolutePath ?? string.Empty; + + if (host.Equals(ApiHost, StringComparison.OrdinalIgnoreCase)) + { + return await HandleApiAsync(request, path, cancellationToken); + } + + if (host.Equals(ContentHost, StringComparison.OrdinalIgnoreCase)) + { + return await HandleContentAsync(request, path, cancellationToken); + } + + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + private async Task HandleApiAsync(HttpRequestMessage request, string path, CancellationToken cancellationToken) + { + if (request.Method != HttpMethod.Post) + { + return new HttpResponseMessage(HttpStatusCode.MethodNotAllowed); + } + + var body = request.Content == null ? 
string.Empty : await request.Content.ReadAsStringAsync(cancellationToken); + var json = string.IsNullOrWhiteSpace(body) ? null : JsonDocument.Parse(body); + + if (path.Equals("/2/files/get_metadata", StringComparison.OrdinalIgnoreCase)) + { + var metadataPath = ReadPath(json); + if (!_entries.TryGetValue(NormalizeLower(metadataPath), out var entry)) + { + return PathNotFoundError(); + } + + return JsonResponse(ToMetadata(entry)); + } + + if (path.Equals("/2/files/create_folder_v2", StringComparison.OrdinalIgnoreCase)) + { + var folderPath = ReadPath(json); + var normalized = NormalizeDisplay(folderPath); + var created = EnsureFolder(normalized); + return JsonResponse(new Dictionary + { + ["metadata"] = ToMetadata(created) + }); + } + + if (path.Equals("/2/files/list_folder", StringComparison.OrdinalIgnoreCase)) + { + var folderPath = ReadPath(json); + var normalizedFolder = NormalizeLower(NormalizeDisplay(folderPath)); + var entries = ListChildren(normalizedFolder).Select(ToMetadata).ToList(); + + return JsonResponse(new Dictionary + { + ["entries"] = entries, + ["cursor"] = "cursor-1", + ["has_more"] = false + }); + } + + if (path.Equals("/2/files/list_folder/continue", StringComparison.OrdinalIgnoreCase)) + { + return JsonResponse(new Dictionary + { + ["entries"] = Array.Empty(), + ["cursor"] = "cursor-1", + ["has_more"] = false + }); + } + + if (path.Equals("/2/files/delete_v2", StringComparison.OrdinalIgnoreCase)) + { + var deletePath = ReadPath(json); + var normalized = NormalizeLower(NormalizeDisplay(deletePath)); + var deleted = DeleteRecursive(normalized); + if (deleted == null) + { + return PathLookupNotFoundError(); + } + + return JsonResponse(new Dictionary + { + ["metadata"] = ToMetadata(deleted) + }); + } + + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + private async Task HandleContentAsync(HttpRequestMessage request, string path, CancellationToken cancellationToken) + { + if (request.Method != HttpMethod.Post) + { + return new 
HttpResponseMessage(HttpStatusCode.MethodNotAllowed); + } + + if (!request.Headers.TryGetValues("Dropbox-API-Arg", out var args)) + { + return new HttpResponseMessage(HttpStatusCode.BadRequest); + } + + var argJson = JsonDocument.Parse(args.First()); + var fullPath = ReadPath(argJson); + var normalizedDisplay = NormalizeDisplay(fullPath); + var normalizedLower = NormalizeLower(normalizedDisplay); + + if (path.Equals("/2/files/upload", StringComparison.OrdinalIgnoreCase)) + { + var content = request.Content == null + ? Array.Empty() + : await request.Content.ReadAsByteArrayAsync(cancellationToken); + + EnsureFolder(ParentPath(normalizedDisplay)); + var entry = UpsertFile(normalizedDisplay, content); + return JsonResponse(ToMetadata(entry)); + } + + if (path.Equals("/2/files/download", StringComparison.OrdinalIgnoreCase)) + { + if (!_entries.TryGetValue(normalizedLower, out var entry) || entry.IsFolder) + { + return PathNotFoundError(); + } + + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(entry.Content) + }; + + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream"); + response.Headers.Add("Dropbox-API-Result", JsonSerializer.Serialize(ToMetadata(entry))); + return response; + } + + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + private Entry EnsureFolder(string folderPathDisplay) + { + var normalized = NormalizeDisplay(folderPathDisplay); + if (string.IsNullOrWhiteSpace(normalized) || normalized == "/") + { + return Entry.Folder("/", "/", "id:root"); + } + + var lower = NormalizeLower(normalized); + if (_entries.TryGetValue(lower, out var existing) && existing.IsFolder) + { + return existing; + } + + var parent = ParentPath(normalized); + EnsureFolder(parent); + + var name = normalized.Split('/', StringSplitOptions.RemoveEmptyEntries).Last(); + var created = Entry.Folder(name, normalized, "id:folder:" + Guid.NewGuid().ToString("N")); + 
_entries[lower] = created; + return created; + } + + private Entry UpsertFile(string filePathDisplay, byte[] content) + { + var normalizedDisplay = NormalizeDisplay(filePathDisplay); + var lower = NormalizeLower(normalizedDisplay); + var name = normalizedDisplay.Split('/', StringSplitOptions.RemoveEmptyEntries).Last(); + var entry = Entry.File(name, normalizedDisplay, "id:file:" + Guid.NewGuid().ToString("N"), content); + _entries[lower] = entry; + return entry; + } + + private IEnumerable ListChildren(string folderPathLower) + { + var normalized = NormalizeLower(NormalizeDisplay(folderPathLower)); + var prefix = normalized == "/" ? "/" : normalized.TrimEnd('/') + "/"; + return _entries.Values.Where(e => !e.PathLower.Equals(folderPathLower, StringComparison.OrdinalIgnoreCase) + && string.Equals(ParentPath(e.PathLower), normalized, StringComparison.OrdinalIgnoreCase) + && e.PathLower.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)); + } + + private Entry? DeleteRecursive(string normalizedLower) + { + if (!_entries.TryGetValue(normalizedLower, out var entry)) + { + return null; + } + + var keys = _entries.Keys + .Where(k => k.Equals(normalizedLower, StringComparison.OrdinalIgnoreCase) + || k.StartsWith(normalizedLower.TrimEnd('/') + "/", StringComparison.OrdinalIgnoreCase)) + .ToList(); + + foreach (var key in keys) + { + _entries.Remove(key); + } + + return entry; + } + + private static string ReadPath(JsonDocument? document) + { + if (document == null) + { + return string.Empty; + } + + return document.RootElement.TryGetProperty("path", out var value) ? value.GetString() ?? string.Empty : string.Empty; + } + + private static string NormalizeDisplay(string path) + { + var normalized = (path ?? 
string.Empty).Replace("\\", "/").Trim(); + if (string.IsNullOrWhiteSpace(normalized) || normalized == "/") + { + return "/"; + } + + if (!normalized.StartsWith('/')) + { + normalized = "/" + normalized; + } + + return normalized.TrimEnd('/'); + } + + private static string NormalizeLower(string path) + { + var display = NormalizeDisplay(path); + return display == "/" ? "/" : display.ToLowerInvariant(); + } + + private static string ParentPath(string path) + { + var normalized = NormalizeDisplay(path); + if (normalized == "/") + { + return "/"; + } + + var lastSlash = normalized.LastIndexOf('/'); + return lastSlash <= 0 ? "/" : normalized[..lastSlash]; + } + + private static Dictionary ToMetadata(Entry entry) + { + if (entry.IsFolder) + { + return new Dictionary + { + [".tag"] = "folder", + ["name"] = entry.Name, + ["path_lower"] = entry.PathLower, + ["path_display"] = entry.PathDisplay, + ["id"] = entry.Id + }; + } + + return new Dictionary + { + [".tag"] = "file", + ["name"] = entry.Name, + ["path_lower"] = entry.PathLower, + ["path_display"] = entry.PathDisplay, + ["id"] = entry.Id, + ["client_modified"] = entry.ClientModified.ToString("O"), + ["server_modified"] = entry.ServerModified.ToString("O"), + ["rev"] = entry.Rev, + ["size"] = entry.Content.LongLength + }; + } + + private static HttpResponseMessage JsonResponse(object payload, HttpStatusCode statusCode = HttpStatusCode.OK) + { + var response = new HttpResponseMessage(statusCode) + { + Content = new StringContent(JsonSerializer.Serialize(payload)) + }; + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); + return response; + } + + private static HttpResponseMessage PathNotFoundError() + { + return JsonResponse(new Dictionary + { + ["error_summary"] = "path/not_found/.", + ["error"] = new Dictionary + { + [".tag"] = "path", + ["path"] = new Dictionary + { + [".tag"] = "not_found" + } + } + }, HttpStatusCode.Conflict); + } + + private static 
HttpResponseMessage PathLookupNotFoundError() + { + return JsonResponse(new Dictionary + { + ["error_summary"] = "path_lookup/not_found/", + ["error"] = new Dictionary + { + [".tag"] = "path_lookup", + ["path_lookup"] = new Dictionary + { + [".tag"] = "not_found" + } + } + }, HttpStatusCode.Conflict); + } + + private sealed record Entry(string Name, string PathDisplay, string Id, bool IsFolder, byte[] Content, DateTime ClientModified, DateTime ServerModified, string Rev) + { + public string PathLower => PathDisplay == "/" ? "/" : PathDisplay.ToLowerInvariant(); + + public static Entry Folder(string name, string pathDisplay, string id) + => new(name, pathDisplay, id, true, Array.Empty(), DateTime.UtcNow, DateTime.UtcNow, "rev-folder"); + + public static Entry File(string name, string pathDisplay, string id, byte[] content) + => new(name, pathDisplay, id, false, content, DateTime.UtcNow, DateTime.UtcNow, "rev-file"); + } + } +} diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs new file mode 100644 index 0000000..bb9afe6 --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs @@ -0,0 +1,339 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Google.Apis.Drive.v3; +using Google.Apis.Drive.v3.Data; +using Google.Apis.Http; +using Google.Apis.Services; +using ManagedCode.Storage.GoogleDrive.Clients; +using Shouldly; +using Xunit; +using DriveFile = Google.Apis.Drive.v3.Data.File; + +namespace ManagedCode.Storage.Tests.Storages.CloudDrive; + +public class GoogleDriveClientHttpTests +{ + private const string RootFolderId = "root"; + + [Fact] + public async Task GoogleDriveClient_WithHttpHandler_RoundTrip() + { + 
var handler = new FakeGoogleDriveHttpHandler(); + var driveService = CreateDriveService(handler); + var client = new GoogleDriveClient(driveService); + + await client.EnsureRootAsync(RootFolderId, true, CancellationToken.None); + + await using (var uploadStream = new MemoryStream(Encoding.UTF8.GetBytes("google payload"))) + { + var uploaded = await client.UploadAsync(RootFolderId, "dir/file.txt", uploadStream, "text/plain", CancellationToken.None); + uploaded.Name.ShouldBe("file.txt"); + uploaded.Size.ShouldBe("google payload".Length); + } + + await using (var nestedStream = new MemoryStream(Encoding.UTF8.GetBytes("nested payload"))) + { + var uploaded = await client.UploadAsync(RootFolderId, "dir/sub/inner.txt", nestedStream, "text/plain", CancellationToken.None); + uploaded.Name.ShouldBe("inner.txt"); + } + + (await client.ExistsAsync(RootFolderId, "dir/file.txt", CancellationToken.None)).ShouldBeTrue(); + (await client.ExistsAsync(RootFolderId, "dir/sub/inner.txt", CancellationToken.None)).ShouldBeTrue(); + + await using (var downloaded = await client.DownloadAsync(RootFolderId, "dir/file.txt", CancellationToken.None)) + using (var reader = new StreamReader(downloaded, Encoding.UTF8)) + { + (await reader.ReadToEndAsync()).ShouldBe("google payload"); + } + + var listed = new List(); + await foreach (var item in client.ListAsync(RootFolderId, "dir", CancellationToken.None)) + { + listed.Add(item); + } + + listed.ShouldContain(f => f.Name == "file.txt"); + + (await client.DeleteAsync(RootFolderId, "dir", CancellationToken.None)).ShouldBeTrue(); + (await client.ExistsAsync(RootFolderId, "dir/file.txt", CancellationToken.None)).ShouldBeFalse(); + (await client.ExistsAsync(RootFolderId, "dir/sub/inner.txt", CancellationToken.None)).ShouldBeFalse(); + + var afterDelete = new List(); + await foreach (var item in client.ListAsync(RootFolderId, "dir", CancellationToken.None)) + { + afterDelete.Add(item); + } + + afterDelete.ShouldBeEmpty(); + (await 
client.DeleteAsync(RootFolderId, "dir", CancellationToken.None)).ShouldBeFalse(); + } + + private static DriveService CreateDriveService(HttpMessageHandler handler) + { + return new DriveService(new BaseClientService.Initializer + { + ApplicationName = "ManagedCode.Storage.Tests", + HttpClientInitializer = new NullCredentialInitializer(), + GZipEnabled = false, + HttpClientFactory = new HttpClientFromMessageHandlerFactory(_ => + new HttpClientFromMessageHandlerFactory.ConfiguredHttpMessageHandler(handler, false, false)) + }); + } + + private sealed class NullCredentialInitializer : IConfigurableHttpClientInitializer + { + public void Initialize(ConfigurableHttpClient httpClient) + { + } + } + + private sealed class FakeGoogleDriveHttpHandler : HttpMessageHandler + { + private const string FolderMimeType = "application/vnd.google-apps.folder"; + private readonly Dictionary _entriesById = new(StringComparer.OrdinalIgnoreCase); + private readonly Dictionary<(string ParentId, string Name), string> _idByParentAndName = new(); + private readonly Dictionary _pendingUploads = new(StringComparer.OrdinalIgnoreCase); + private int _counter; + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + var path = request.RequestUri?.AbsolutePath ?? string.Empty; + var query = ParseQuery(request.RequestUri?.Query); + + if (request.Method == HttpMethod.Get && path.Equals("/drive/v3/files", StringComparison.OrdinalIgnoreCase)) + { + var q = query.TryGetValue("q", out var qValue) ? 
qValue : string.Empty; + var files = List(q); + return JsonResponse(new + { + files = files.Select(ToResponse).ToList() + }); + } + + if (request.Method == HttpMethod.Post && path.Equals("/drive/v3/files", StringComparison.OrdinalIgnoreCase)) + { + var body = await request.Content!.ReadAsStringAsync(cancellationToken); + var model = JsonSerializer.Deserialize(body, new JsonSerializerOptions { PropertyNameCaseInsensitive = true }) + ?? throw new InvalidOperationException("Create request body is missing."); + + var parentId = model.Parents?.FirstOrDefault() ?? RootFolderId; + var mimeType = string.IsNullOrWhiteSpace(model.MimeType) ? "application/octet-stream" : model.MimeType; + var created = CreateEntry(name: model.Name ?? Guid.NewGuid().ToString("N"), parentId: parentId, mimeType: mimeType, content: Array.Empty()); + return JsonResponse(ToResponse(created)); + } + + if (request.Method == HttpMethod.Post + && path.Equals("/upload/drive/v3/files", StringComparison.OrdinalIgnoreCase) + && query.TryGetValue("uploadType", out var uploadType) + && string.Equals(uploadType, "resumable", StringComparison.OrdinalIgnoreCase)) + { + var body = await request.Content!.ReadAsStringAsync(cancellationToken); + var model = JsonSerializer.Deserialize(body, new JsonSerializerOptions { PropertyNameCaseInsensitive = true }) + ?? throw new InvalidOperationException("Upload initiation body is missing."); + + var uploadId = "upload-" + Interlocked.Increment(ref _counter); + _pendingUploads[uploadId] = new PendingUpload( + Name: model.Name ?? Guid.NewGuid().ToString("N"), + ParentId: model.Parents?.FirstOrDefault() ?? RootFolderId, + MimeType: model.MimeType ?? 
"application/octet-stream"); + + var response = new HttpResponseMessage(HttpStatusCode.OK); + response.Headers.Location = new Uri($"https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&upload_id={uploadId}"); + response.Content = new ByteArrayContent(Array.Empty()); + return response; + } + + if (request.Method == HttpMethod.Put + && path.Equals("/upload/drive/v3/files", StringComparison.OrdinalIgnoreCase) + && query.TryGetValue("uploadType", out uploadType) + && string.Equals(uploadType, "resumable", StringComparison.OrdinalIgnoreCase) + && query.TryGetValue("upload_id", out var uploadIdValue) + && _pendingUploads.TryGetValue(uploadIdValue, out var pending)) + { + var content = await request.Content!.ReadAsByteArrayAsync(cancellationToken); + var created = CreateEntry(pending.Name, pending.ParentId, pending.MimeType, content); + _pendingUploads.Remove(uploadIdValue); + return JsonResponse(ToResponse(created)); + } + + if (path.StartsWith("/drive/v3/files/", StringComparison.OrdinalIgnoreCase)) + { + var fileId = path["/drive/v3/files/".Length..]; + if (request.Method == HttpMethod.Delete) + { + if (!_entriesById.Remove(fileId)) + { + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + return new HttpResponseMessage(HttpStatusCode.NoContent); + } + + if (request.Method == HttpMethod.Get && query.TryGetValue("alt", out var alt) && string.Equals(alt, "media", StringComparison.OrdinalIgnoreCase)) + { + if (!_entriesById.TryGetValue(fileId, out var entry) || entry.MimeType == FolderMimeType) + { + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(entry.Content) + }; + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(entry.MimeType); + return response; + } + + if (request.Method == HttpMethod.Get) + { + if (!_entriesById.TryGetValue(fileId, out var entry)) + { + return new 
HttpResponseMessage(HttpStatusCode.NotFound); + } + + return JsonResponse(ToResponse(entry)); + } + } + + return new HttpResponseMessage(HttpStatusCode.NotFound) + { + Content = new StringContent($"Unhandled Drive request: {request.Method} {request.RequestUri}") + }; + } + + private Entry CreateEntry(string name, string parentId, string mimeType, byte[] content) + { + var id = "id-" + Interlocked.Increment(ref _counter); + var entry = new Entry( + Id: id, + Name: name, + ParentId: parentId, + MimeType: mimeType, + Content: content, + Created: DateTimeOffset.UtcNow, + Modified: DateTimeOffset.UtcNow); + + _entriesById[id] = entry; + _idByParentAndName[(parentId, name)] = id; + return entry; + } + + private IEnumerable List(string q) + { + if (string.IsNullOrWhiteSpace(q)) + { + return Enumerable.Empty(); + } + + var parentId = ExtractFirstQuoted(q); + if (parentId == null) + { + return Enumerable.Empty(); + } + + var name = ExtractNameClause(q); + if (name != null) + { + return _idByParentAndName.TryGetValue((parentId, name), out var id) && _entriesById.TryGetValue(id, out var entry) + ? new[] { entry } + : Enumerable.Empty(); + } + + return _entriesById.Values.Where(e => string.Equals(e.ParentId, parentId, StringComparison.OrdinalIgnoreCase)); + } + + private static string? ExtractFirstQuoted(string value) + { + var first = value.IndexOf('\''); + if (first < 0) + { + return null; + } + + var second = value.IndexOf('\'', first + 1); + return second < 0 ? null : value.Substring(first + 1, second - first - 1); + } + + private static string? ExtractNameClause(string q) + { + const string marker = "name='"; + var start = q.IndexOf(marker, StringComparison.OrdinalIgnoreCase); + if (start < 0) + { + return null; + } + + start += marker.Length; + var end = q.IndexOf('\'', start); + return end < 0 ? null : q.Substring(start, end - start); + } + + private static Dictionary ParseQuery(string? 
query) + { + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (string.IsNullOrWhiteSpace(query)) + { + return result; + } + + foreach (var part in query.TrimStart('?').Split('&', StringSplitOptions.RemoveEmptyEntries)) + { + var kv = part.Split('=', 2); + var key = Uri.UnescapeDataString(kv[0]); + var value = kv.Length == 2 ? Uri.UnescapeDataString(kv[1]) : string.Empty; + result[key] = value; + } + + return result; + } + + private HttpResponseMessage JsonResponse(object payload, HttpStatusCode statusCode = HttpStatusCode.OK) + { + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); + var bytes = Encoding.UTF8.GetBytes(json); + var response = new HttpResponseMessage(statusCode) + { + Content = new ByteArrayContent(bytes) + }; + + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); + response.Content.Headers.ContentLength = bytes.LongLength; + return response; + } + + private sealed record Entry(string Id, string Name, string ParentId, string MimeType, byte[] Content, DateTimeOffset Created, DateTimeOffset Modified); + + private sealed record PendingUpload(string Name, string ParentId, string MimeType); + + private sealed class CreateRequest + { + public string? Name { get; set; } + public IList? Parents { get; set; } + public string? MimeType { get; set; } + } + + private static object ToResponse(Entry entry) + { + return new + { + id = entry.Id, + name = entry.Name, + parents = new[] { entry.ParentId }, + mimeType = entry.MimeType, + createdTime = entry.Created.ToString("O"), + modifiedTime = entry.Modified.ToString("O"), + size = entry.MimeType == FolderMimeType ? 
null : entry.Content.LongLength.ToString() + }; + } + } +} diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs index 429d6b2..1cad420 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs @@ -21,6 +21,7 @@ namespace ManagedCode.Storage.Tests.Storages.CloudDrive; public class GraphOneDriveClientTests { private const string RootKey = "root"; + private const string DriveKey = "drive"; [Fact] public async Task GraphClient_EndToEnd() @@ -31,6 +32,14 @@ public async Task GraphClient_EndToEnd() await storageClient.EnsureRootAsync("me", "work", true, CancellationToken.None); + var rootItems = new List(); + await foreach (var item in storageClient.ListAsync("me", null, CancellationToken.None)) + { + rootItems.Add(item); + } + + rootItems.ShouldContain(i => i.Name == "work"); + await using (var uploadStream = new MemoryStream(Encoding.UTF8.GetBytes("graph payload"))) { var uploaded = await storageClient.UploadAsync("me", "work/doc.txt", uploadStream, "text/plain", CancellationToken.None); @@ -59,6 +68,18 @@ public async Task GraphClient_EndToEnd() (await storageClient.DeleteAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeTrue(); (await storageClient.ExistsAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeFalse(); + (await storageClient.DeleteAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeFalse(); + } + + [Fact] + public async Task EnsureRootAsync_WhenFolderMissing_AndCreateIfNotExistsFalse_ShouldThrow() + { + var handler = new FakeGraphHandler(); + var client = CreateGraphClient(handler); + var storageClient = new GraphOneDriveClient(client); + + await Should.ThrowAsync(() => + storageClient.EnsureRootAsync("me", "missing", false, CancellationToken.None)); } private static GraphServiceClient 
CreateGraphClient(HttpMessageHandler handler) @@ -97,9 +118,14 @@ protected override Task SendAsync(HttpRequestMessage reques { cancellationToken.ThrowIfCancellationRequested(); + if (IsMeDriveRequest(request.RequestUri)) + { + return Task.FromResult(JsonResponse(new Microsoft.Graph.Models.Drive { Id = DriveKey })); + } + if (IsRootRequest(request.RequestUri)) { - return Task.FromResult(JsonResponse(_entries[RootKey])); + return Task.FromResult(JsonResponse(GraphEntry.ToDriveItem(_entries[RootKey]))); } if (TryHandleChildrenRequest(request, out var childrenResponse)) @@ -112,29 +138,7 @@ protected override Task SendAsync(HttpRequestMessage reques return Task.FromResult(itemResponse); } - return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)); - } - - private bool TryHandleItemRequest(HttpRequestMessage request, out HttpResponseMessage response) - { - response = new HttpResponseMessage(HttpStatusCode.NotFound); - var segments = request.RequestUri!.AbsolutePath.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - var contentSegment = segments.FirstOrDefault(s => s.EndsWith(":content", StringComparison.OrdinalIgnoreCase)); - - if (contentSegment != null) - { - var path = DecodePath(contentSegment.Replace(":content", string.Empty, StringComparison.OrdinalIgnoreCase)); - return HandleContentRequest(request, path, ref response); - } - - var itemWithPath = segments.FirstOrDefault(s => s.Contains(':')); - if (itemWithPath != null) - { - var path = DecodePath(itemWithPath.Trim(':')); - return HandleMetadataRequest(request.Method, path, ref response); - } - - return false; + return Task.FromResult(NotFoundResponse($"Unhandled Graph request: {request.Method} {request.RequestUri}")); } private bool HandleMetadataRequest(HttpMethod method, string path, ref HttpResponseMessage response) @@ -144,7 +148,7 @@ private bool HandleMetadataRequest(HttpMethod method, string path, ref HttpRespo { if (entry == null) { - response = new 
HttpResponseMessage(HttpStatusCode.NotFound); + response = NotFoundResponse(); return true; } @@ -155,11 +159,11 @@ private bool HandleMetadataRequest(HttpMethod method, string path, ref HttpRespo if (entry == null) { - response = new HttpResponseMessage(HttpStatusCode.NotFound); + response = NotFoundResponse(); return true; } - response = JsonResponse(entry); + response = JsonResponse(GraphEntry.ToDriveItem(entry)); return true; } @@ -175,14 +179,14 @@ private bool HandleContentRequest(HttpRequestMessage request, string path, ref H buffer.CopyTo(memory); var entry = GraphEntry.File(Path.GetFileName(path), parentPath, memory.ToArray()); _entries[entry.Id] = entry; - response = JsonResponse(entry); + response = JsonResponse(GraphEntry.ToDriveItem(entry)); return true; } var existing = _entries.Values.FirstOrDefault(v => string.Equals(v.Path, path, StringComparison.OrdinalIgnoreCase)); if (existing == null) { - response = new HttpResponseMessage(HttpStatusCode.NotFound); + response = NotFoundResponse(); return true; } @@ -194,6 +198,18 @@ private bool HandleContentRequest(HttpRequestMessage request, string path, ref H return true; } + private HttpResponseMessage NotFoundResponse(string? message = null) + { + return JsonResponse(new + { + error = new + { + code = "itemNotFound", + message = message ?? "Item not found." + } + }, HttpStatusCode.NotFound); + } + private bool TryHandleChildrenRequest(HttpRequestMessage request, out HttpResponseMessage response) { response = new HttpResponseMessage(HttpStatusCode.NotFound); @@ -217,7 +233,7 @@ private bool TryHandleChildrenRequest(HttpRequestMessage request, out HttpRespon var created = GraphEntry.Folder(item!.Name!, parentPath: _entries[idSegment ?? 
RootKey].Path); _entries[created.Id] = created; - response = JsonResponse(created, HttpStatusCode.Created); + response = JsonResponse(GraphEntry.ToDriveItem(created), HttpStatusCode.Created); return true; } @@ -232,7 +248,19 @@ private bool TryHandleChildrenRequest(HttpRequestMessage request, out HttpRespon private static bool IsRootRequest(Uri? requestUri) { - return requestUri != null && requestUri.AbsolutePath.TrimEnd('/').EndsWith("me/drive/root", StringComparison.OrdinalIgnoreCase); + if (requestUri == null) + { + return false; + } + + var path = requestUri.AbsolutePath.TrimEnd('/'); + return path.EndsWith("/me/drive/root", StringComparison.OrdinalIgnoreCase) + || path.EndsWith($"/drives/{DriveKey}/root", StringComparison.OrdinalIgnoreCase); + } + + private static bool IsMeDriveRequest(Uri? requestUri) + { + return requestUri != null && requestUri.AbsolutePath.TrimEnd('/').EndsWith("/me/drive", StringComparison.OrdinalIgnoreCase); } private void EnsureFolder(string path) @@ -255,21 +283,54 @@ private void EnsureFolder(string path) _entries[folder.Id] = folder; } - private static string DecodePath(string segment) + private static bool TryGetItemPath(Uri requestUri, out string path, out bool isContent) { - return Uri.UnescapeDataString(segment.Replace("root:", string.Empty, StringComparison.OrdinalIgnoreCase)).Trim('/'); + path = string.Empty; + isContent = false; + + var requestPath = Uri.UnescapeDataString(requestUri.AbsolutePath); + var markerIndex = requestPath.IndexOf("/root:", StringComparison.OrdinalIgnoreCase); + if (markerIndex < 0) + { + return false; + } + + var itemPath = requestPath[(markerIndex + "/root:".Length)..]; + if (itemPath.EndsWith(":/content", StringComparison.OrdinalIgnoreCase)) + { + isContent = true; + itemPath = itemPath[..^":/content".Length]; + } + + itemPath = itemPath.TrimEnd(':'); + path = itemPath.Trim('/'); + return true; } private static HttpResponseMessage JsonResponse(object content, HttpStatusCode status = 
HttpStatusCode.OK) { var response = new HttpResponseMessage(status) { - Content = new StringContent(JsonSerializer.Serialize(content)) + Content = new StringContent(JsonSerializer.Serialize(content, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase })) }; response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); return response; } + + private bool TryHandleItemRequest(HttpRequestMessage request, out HttpResponseMessage response) + { + response = new HttpResponseMessage(HttpStatusCode.NotFound); + + if (!TryGetItemPath(request.RequestUri!, out var path, out var isContent)) + { + return false; + } + + return isContent + ? HandleContentRequest(request, path, ref response) + : HandleMetadataRequest(request.Method, path, ref response); + } } private sealed class GraphEntry diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs new file mode 100644 index 0000000..be986e1 --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs @@ -0,0 +1,72 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using ManagedCode.Storage.CloudKit.Clients; +using ManagedCode.Storage.CloudKit.Options; +using Shouldly; +using Xunit; + +namespace ManagedCode.Storage.Tests.Storages.CloudKit; + +public class CloudKitClientHttpTests +{ + [Fact] + public async Task CloudKitClient_WithHttpHandler_RoundTrip() + { + var handler = new FakeCloudKitHttpHandler(); + var httpClient = new HttpClient(handler); + + var options = new CloudKitStorageOptions + { + ContainerId = "iCloud.com.example.app", + Environment = CloudKitEnvironment.Development, + Database = CloudKitDatabase.Public, + ApiToken = 
"test-token", + RecordType = "MCStorageFile", + PathFieldName = "path", + ContentTypeFieldName = "contentType", + AssetFieldName = "file" + }; + + using var client = new CloudKitClient(options, httpClient); + + const string recordName = "record-1"; + const string internalPath = "app-data/dir/file.txt"; + + await using (var uploadStream = new MemoryStream(Encoding.UTF8.GetBytes("cloudkit payload"))) + { + var uploaded = await client.UploadAsync(recordName, internalPath, uploadStream, "text/plain", CancellationToken.None); + uploaded.RecordName.ShouldBe(recordName); + uploaded.Path.ShouldBe(internalPath); + uploaded.ContentType.ShouldBe("text/plain"); + uploaded.Size.ShouldBe((ulong)"cloudkit payload".Length); + } + + (await client.ExistsAsync(recordName, CancellationToken.None)).ShouldBeTrue(); + + await using (var downloaded = await client.DownloadAsync(recordName, CancellationToken.None)) + using (var reader = new StreamReader(downloaded, Encoding.UTF8)) + { + (await reader.ReadToEndAsync()).ShouldBe("cloudkit payload"); + } + + var listed = new List(); + await foreach (var record in client.QueryByPathPrefixAsync("app-data/dir/", CancellationToken.None)) + { + listed.Add(record); + } + + listed.ShouldContain(r => r.RecordName == recordName); + + (await client.DeleteAsync(recordName, CancellationToken.None)).ShouldBeTrue(); + (await client.ExistsAsync(recordName, CancellationToken.None)).ShouldBeFalse(); + (await client.DeleteAsync(recordName, CancellationToken.None)).ShouldBeFalse(); + } +} diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitDependencyInjectionTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitDependencyInjectionTests.cs new file mode 100644 index 0000000..4f1f82d --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitDependencyInjectionTests.cs @@ -0,0 +1,71 @@ +using ManagedCode.Storage.CloudKit; +using ManagedCode.Storage.CloudKit.Extensions; +using 
ManagedCode.Storage.CloudKit.Options; +using ManagedCode.Storage.Core; +using ManagedCode.Storage.Core.Exceptions; +using Microsoft.Extensions.DependencyInjection; +using Shouldly; +using Xunit; + +namespace ManagedCode.Storage.Tests.Storages.CloudKit; + +public class CloudKitDependencyInjectionTests +{ + [Fact] + public void CloudKit_AddAsDefault_ShouldResolveIStorage() + { + var services = new ServiceCollection(); + services.AddCloudKitStorageAsDefault(options => + { + options.ContainerId = "iCloud.com.example.app"; + options.ApiToken = "test-token"; + options.Environment = CloudKitEnvironment.Development; + options.Database = CloudKitDatabase.Public; + }); + + using var provider = services.BuildServiceProvider(); + var storage = provider.GetRequiredService(); + var typed = provider.GetRequiredService(); + + storage.ShouldBeSameAs(typed); + } + + [Fact] + public void CloudKit_AddAsDefault_Keyed_ShouldResolveKeyedIStorage() + { + var services = new ServiceCollection(); + services.AddCloudKitStorageAsDefault("tenant-a", options => + { + options.ContainerId = "iCloud.com.example.app"; + options.ApiToken = "test-token"; + }); + + using var provider = services.BuildServiceProvider(); + var storage = provider.GetRequiredKeyedService("tenant-a"); + var typed = provider.GetRequiredKeyedService("tenant-a"); + + storage.ShouldBeSameAs(typed); + } + + [Fact] + public void CloudKit_WhenContainerIdMissing_ShouldThrow() + { + var services = new ServiceCollection(); + Should.Throw(() => + services.AddCloudKitStorage(options => options.ApiToken = "test-token")); + } + + [Fact] + public void CloudKit_WhenApiTokenAndServerKeyProvided_ShouldThrow() + { + var services = new ServiceCollection(); + Should.Throw(() => + services.AddCloudKitStorage(options => + { + options.ContainerId = "iCloud.com.example.app"; + options.ApiToken = "test-token"; + options.ServerToServerKeyId = "kid"; + options.ServerToServerPrivateKeyPem = "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----"; 
+ })); + } +} diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageProviderTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageProviderTests.cs new file mode 100644 index 0000000..73c9080 --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageProviderTests.cs @@ -0,0 +1,73 @@ +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using ManagedCode.Storage.CloudKit; +using ManagedCode.Storage.CloudKit.Clients; +using ManagedCode.Storage.CloudKit.Options; +using ManagedCode.Storage.Core; +using Microsoft.Extensions.DependencyInjection; +using Shouldly; +using Xunit; + +namespace ManagedCode.Storage.Tests.Storages.CloudKit; + +public class CloudKitStorageProviderTests +{ + [Fact] + public void CloudKitStorageProvider_CreateStorage_ShouldUseCloudKitOptions() + { + using var serviceProvider = new ServiceCollection().BuildServiceProvider(); + var defaultOptions = new CloudKitStorageOptions + { + ContainerId = "iCloud.com.example.app", + ApiToken = "test-token", + Client = new StubCloudKitClient() + }; + + var provider = new CloudKitStorageProvider(serviceProvider, defaultOptions); + provider.StorageOptionsType.ShouldBe(typeof(CloudKitStorageOptions)); + + var cloned = provider.GetDefaultOptions().ShouldBeOfType(); + cloned.ShouldNotBeSameAs(defaultOptions); + cloned.ContainerId.ShouldBe(defaultOptions.ContainerId); + cloned.ApiToken.ShouldBe(defaultOptions.ApiToken); + cloned.Client.ShouldBeSameAs(defaultOptions.Client); + + var storage = provider.CreateStorage(cloned); + storage.ShouldBeOfType(); + } + + [Fact] + public void CloudKitStorageProvider_WhenOptionsWrong_ShouldThrow() + { + using var serviceProvider = new ServiceCollection().BuildServiceProvider(); + var provider = new CloudKitStorageProvider(serviceProvider, new CloudKitStorageOptions { ContainerId = "iCloud.com.example.app", ApiToken = "test-token", Client = 
new StubCloudKitClient() }); + + Should.Throw(() => + provider.CreateStorage(new FakeOptions())); + } + + private sealed class FakeOptions : IStorageOptions + { + public bool CreateContainerIfNotExists { get; set; } + } + + private sealed class StubCloudKitClient : ICloudKitClient + { + public Task UploadAsync(string recordName, string internalPath, Stream content, string contentType, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task DownloadAsync(string recordName, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task DeleteAsync(string recordName, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task ExistsAsync(string recordName, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public Task GetRecordAsync(string recordName, CancellationToken cancellationToken) => throw new NotImplementedException(); + + public IAsyncEnumerable QueryByPathPrefixAsync(string pathPrefix, CancellationToken cancellationToken) => throw new NotImplementedException(); + } +} + diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageTests.cs new file mode 100644 index 0000000..ec75084 --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageTests.cs @@ -0,0 +1,58 @@ +using System.IO; +using System.Net.Http; +using System.Text; +using System.Threading.Tasks; +using ManagedCode.Storage.CloudKit; +using ManagedCode.Storage.CloudKit.Options; +using Shouldly; +using Xunit; + +namespace ManagedCode.Storage.Tests.Storages.CloudKit; + +public class CloudKitStorageTests +{ + [Fact] + public async Task CloudKitStorage_RoundTrip_WithHttpHandler() + { + var handler = new FakeCloudKitHttpHandler(); + var httpClient = new HttpClient(handler); + + var storage = new CloudKitStorage(new CloudKitStorageOptions + { + ContainerId = 
"iCloud.com.example.app", + Environment = CloudKitEnvironment.Development, + Database = CloudKitDatabase.Public, + ApiToken = "test-token", + RootPath = "app-data", + HttpClient = httpClient + }); + + var upload = await storage.UploadAsync("storage payload", options => + { + options.Directory = "dir"; + options.FileName = "file.txt"; + }); + + upload.IsSuccess.ShouldBeTrue(); + upload.Value.FullName.ShouldBe("dir/file.txt"); + upload.Value.Container.ShouldBe("iCloud.com.example.app"); + + var download = await storage.DownloadAsync("dir/file.txt"); + download.IsSuccess.ShouldBeTrue(); + using (var reader = new StreamReader(download.Value.FileStream, Encoding.UTF8)) + { + (await reader.ReadToEndAsync()).ShouldBe("storage payload"); + } + + var existsBeforeDelete = await storage.ExistsAsync("dir/file.txt"); + existsBeforeDelete.IsSuccess.ShouldBeTrue(); + existsBeforeDelete.Value.ShouldBeTrue(); + + var deleteDir = await storage.DeleteDirectoryAsync("dir"); + deleteDir.IsSuccess.ShouldBeTrue(); + + var existsAfterDelete = await storage.ExistsAsync("dir/file.txt"); + existsAfterDelete.IsSuccess.ShouldBeTrue(); + existsAfterDelete.Value.ShouldBeFalse(); + } +} diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs new file mode 100644 index 0000000..1b7adff --- /dev/null +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs @@ -0,0 +1,271 @@ +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace ManagedCode.Storage.Tests.Storages.CloudKit; + +internal sealed class FakeCloudKitHttpHandler : HttpMessageHandler +{ + private const string CloudKitHost = "api.apple-cloudkit.com"; + private const string AssetsHost = "assets.example"; + + private readonly Dictionary 
_records = new(StringComparer.OrdinalIgnoreCase); + private readonly Dictionary _uploads = new(StringComparer.OrdinalIgnoreCase); + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + var host = request.RequestUri?.Host ?? string.Empty; + var path = request.RequestUri?.AbsolutePath ?? string.Empty; + + if (host.Equals(CloudKitHost, StringComparison.OrdinalIgnoreCase)) + { + return await HandleCloudKitAsync(request, path, cancellationToken); + } + + if (host.Equals(AssetsHost, StringComparison.OrdinalIgnoreCase)) + { + return await HandleAssetsAsync(request, path, cancellationToken); + } + + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + private async Task HandleCloudKitAsync(HttpRequestMessage request, string path, CancellationToken cancellationToken) + { + if (request.Method != HttpMethod.Post) + { + return new HttpResponseMessage(HttpStatusCode.MethodNotAllowed); + } + + if (path.EndsWith("/assets/upload", StringComparison.OrdinalIgnoreCase)) + { + var body = await request.Content!.ReadAsStringAsync(cancellationToken); + using var doc = JsonDocument.Parse(body); + var token = doc.RootElement.GetProperty("tokens").EnumerateArray().First(); + var recordName = token.GetProperty("recordName").GetString() ?? string.Empty; + var fieldName = token.GetProperty("fieldName").GetString() ?? 
"file"; + + return JsonResponse(new + { + tokens = new[] + { + new + { + recordName, + fieldName, + url = $"https://{AssetsHost}/upload/{recordName}" + } + } + }); + } + + if (path.EndsWith("/records/modify", StringComparison.OrdinalIgnoreCase)) + { + var body = await request.Content!.ReadAsStringAsync(cancellationToken); + using var doc = JsonDocument.Parse(body); + var operation = doc.RootElement.GetProperty("operations").EnumerateArray().First(); + var type = operation.GetProperty("operationType").GetString(); + var record = operation.GetProperty("record"); + var recordName = record.GetProperty("recordName").GetString() ?? string.Empty; + + if (string.Equals(type, "forceDelete", StringComparison.OrdinalIgnoreCase)) + { + if (_records.Remove(recordName)) + { + return JsonResponse(new { }); + } + + return JsonResponse(new + { + errors = new[] + { + new + { + recordName, + serverErrorCode = "NOT_FOUND" + } + } + }); + } + + if (!string.Equals(type, "forceUpdate", StringComparison.OrdinalIgnoreCase)) + { + return new HttpResponseMessage(HttpStatusCode.BadRequest); + } + + var recordType = record.GetProperty("recordType").GetString() ?? "MCStorageFile"; + var fields = record.GetProperty("fields"); + var internalPath = fields.GetProperty("path").GetProperty("value").GetString() ?? string.Empty; + var contentType = fields.GetProperty("contentType").GetProperty("value").GetString() ?? "application/octet-stream"; + + var content = _uploads.TryGetValue(recordName, out var bytes) ? 
bytes : Array.Empty(); + var now = DateTimeOffset.UtcNow; + + var stored = new StoredRecord(recordName, recordType, internalPath, contentType, content, now, now); + _records[recordName] = stored; + + return JsonResponse(new + { + records = new[] + { + ToRecordResponse(stored) + } + }); + } + + if (path.EndsWith("/records/lookup", StringComparison.OrdinalIgnoreCase)) + { + var body = await request.Content!.ReadAsStringAsync(cancellationToken); + using var doc = JsonDocument.Parse(body); + var recordName = doc.RootElement.GetProperty("records").EnumerateArray().First().GetProperty("recordName").GetString() ?? string.Empty; + + if (!_records.TryGetValue(recordName, out var stored)) + { + return JsonResponse(new + { + errors = new[] + { + new + { + recordName, + serverErrorCode = "NOT_FOUND" + } + } + }); + } + + return JsonResponse(new + { + records = new[] + { + ToRecordResponse(stored) + } + }); + } + + if (path.EndsWith("/records/query", StringComparison.OrdinalIgnoreCase)) + { + var body = await request.Content!.ReadAsStringAsync(cancellationToken); + using var doc = JsonDocument.Parse(body); + var prefix = doc.RootElement.GetProperty("query") + .GetProperty("filterBy") + .EnumerateArray() + .First() + .GetProperty("fieldValue") + .GetProperty("value") + .GetString() ?? string.Empty; + + var results = _records.Values + .Where(r => r.Path.StartsWith(prefix, StringComparison.OrdinalIgnoreCase)) + .Select(ToRecordResponse) + .ToList(); + + return JsonResponse(new + { + records = results + }); + } + + return new HttpResponseMessage(HttpStatusCode.NotFound) + { + Content = new StringContent($"Unhandled CloudKit request: {request.Method} {request.RequestUri}") + }; + } + + private async Task HandleAssetsAsync(HttpRequestMessage request, string path, CancellationToken cancellationToken) + { + if (path.StartsWith("/upload/", StringComparison.OrdinalIgnoreCase)) + { + var recordName = path["/upload/".Length..]; + var bytes = request.Content == null ? 
Array.Empty() : await request.Content.ReadAsByteArrayAsync(cancellationToken); + _uploads[recordName] = bytes; + + return JsonResponse(new + { + singleFile = new + { + receipt = "receipt-" + recordName, + size = bytes.LongLength, + fileChecksum = Convert.ToBase64String(Encoding.UTF8.GetBytes("checksum")) + } + }); + } + + if (path.StartsWith("/download/", StringComparison.OrdinalIgnoreCase)) + { + var recordName = path["/download/".Length..]; + if (!_records.TryGetValue(recordName, out var record)) + { + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(record.Content) + }; + } + + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + private static object ToRecordResponse(StoredRecord record) + { + var timestamp = record.LastModified.ToUnixTimeMilliseconds(); + return new + { + recordName = record.RecordName, + recordType = record.RecordType, + fields = new Dictionary + { + ["path"] = new Dictionary { ["value"] = record.Path }, + ["contentType"] = new Dictionary { ["value"] = record.ContentType }, + ["file"] = new Dictionary + { + ["value"] = new Dictionary + { + ["downloadURL"] = $"https://{AssetsHost}/download/{record.RecordName}", + ["size"] = record.Content.LongLength + } + } + }, + created = new + { + timestamp + }, + modified = new + { + timestamp + } + }; + } + + private static HttpResponseMessage JsonResponse(object payload, HttpStatusCode statusCode = HttpStatusCode.OK) + { + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); + var bytes = Encoding.UTF8.GetBytes(json); + var response = new HttpResponseMessage(statusCode) + { + Content = new ByteArrayContent(bytes) + }; + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); + return response; + } + + private sealed record StoredRecord( + string RecordName, + string 
RecordType, + string Path, + string ContentType, + byte[] Content, + DateTimeOffset CreatedOn, + DateTimeOffset LastModified); +} + From 2c809b59b95258a665961ae21c4503ec154ece54 Mon Sep 17 00:00:00 2001 From: ksemenenko Date: Mon, 15 Dec 2025 00:01:51 +0100 Subject: [PATCH 4/6] client --- AGENTS.md | 2 + README.md | 17 ++++++-- .../Clients/DropboxClientWrapper.cs | 7 +++- .../DropboxStorage.cs | 38 +++++++++++++++++ .../DropboxStorageProvider.cs | 5 +++ .../Extensions/ServiceCollectionExtensions.cs | 12 +++++- .../Options/DropboxStorageOptions.cs | 22 ++++++++++ .../Storages/Abstracts/UploadTests.cs | 6 ++- .../CloudDriveDependencyInjectionTests.cs | 41 +++++++++++++++++++ .../DropboxClientWrapperHttpTests.cs | 38 ++++++++++++++++- 10 files changed, 179 insertions(+), 9 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index 78fe2be..41fc275 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -81,10 +81,12 @@ If no new rule is detected → do not update the file. ### Documentation (ALL TASKS) - Docs live in `docs/` and `README.md` +- Keep a GitHub Pages docs site in sync with `docs/`, using `DOCS-EXAMPLE/` as the reference template for structure and CI/pipeline - Update docs when behaviour changes - Update configuration examples when required - When adding new projects/providers, ensure `README.md` clearly documents installation, DI wiring, and basic usage examples - Where feasible, prefer provider options that can build vendor SDK clients from credentials (to reduce consumer boilerplate) while still allowing client injection for advanced scenarios +- Avoid "ownership flags" like `ownsClient`; prefer a clear swap point (wrapper/factory) so lifetime and disposal rules stay simple and predictable - For providers that rely on vendor SDK clients (Graph/Drive/Dropbox/etc.), document how to obtain credentials/keys/tokens and include a minimal code snippet that builds the required SDK client instance ### Testing (ALL TASKS) diff --git a/README.md b/README.md index aef8691..efe746c 
100644 --- a/README.md +++ b/README.md @@ -225,19 +225,30 @@ var tenantStorage = app.Services.GetRequiredKeyedService("tenant-a"); var refreshToken = auth.RefreshToken; // store securely if you requested offline access ``` -5. Create the Dropbox client and register Dropbox storage with a root path (use `/` for full access apps or `/Apps/` for app folders): +5. Register Dropbox storage with a root path (use `/` for full access apps or `/Apps/` for app folders). You can let the provider create the SDK client from credentials: ```csharp - using Dropbox.Api; builder.Services.AddDropboxStorageAsDefault(options => { var accessToken = configuration["Dropbox:AccessToken"]!; - options.DropboxClient = new DropboxClient(accessToken); + options.AccessToken = accessToken; options.RootPath = "/apps/my-app"; options.CreateContainerIfNotExists = true; }); ``` + Or, for production, prefer refresh tokens (offline access): + + ```csharp + builder.Services.AddDropboxStorageAsDefault(options => + { + options.RefreshToken = configuration["Dropbox:RefreshToken"]!; + options.AppKey = configuration["Dropbox:AppKey"]!; + options.AppSecret = configuration["Dropbox:AppSecret"]; // optional when using PKCE + options.RootPath = "/apps/my-app"; + }); + ``` + 6. Store tokens in user secrets or environment variables; never commit them to source control. 
**CloudKit (iCloud app data)** diff --git a/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs index 1c0c5bd..b83b4b9 100644 --- a/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs +++ b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs @@ -10,7 +10,7 @@ namespace ManagedCode.Storage.Dropbox.Clients; -public class DropboxClientWrapper : IDropboxClientWrapper +public class DropboxClientWrapper : IDropboxClientWrapper, IDisposable { private readonly DropboxClient _client; @@ -19,6 +19,11 @@ public DropboxClientWrapper(DropboxClient client) _client = client ?? throw new ArgumentNullException(nameof(client)); } + public void Dispose() + { + _client.Dispose(); + } + public async Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken) { if (string.IsNullOrWhiteSpace(rootPath)) diff --git a/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs b/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs index 4f8393b..a1a923a 100644 --- a/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs +++ b/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs @@ -36,9 +36,47 @@ protected override IDropboxClientWrapper CreateStorageClient() return new DropboxClientWrapper(StorageOptions.DropboxClient); } + if (!string.IsNullOrWhiteSpace(StorageOptions.AccessToken)) + { + var client = StorageOptions.DropboxClientConfig == null + ? 
new global::Dropbox.Api.DropboxClient(StorageOptions.AccessToken) + : new global::Dropbox.Api.DropboxClient(StorageOptions.AccessToken, StorageOptions.DropboxClientConfig); + + return new DropboxClientWrapper(client); + } + + if (!string.IsNullOrWhiteSpace(StorageOptions.RefreshToken)) + { + if (string.IsNullOrWhiteSpace(StorageOptions.AppKey)) + { + throw new InvalidOperationException("Dropbox AppKey is required when configuring the storage with a refresh token."); + } + + var client = CreateDropboxClientFromRefreshToken(StorageOptions.RefreshToken, StorageOptions.AppKey, StorageOptions.AppSecret, StorageOptions.DropboxClientConfig); + return new DropboxClientWrapper(client); + } + throw new InvalidOperationException("Dropbox client is not configured for storage."); } + private static global::Dropbox.Api.DropboxClient CreateDropboxClientFromRefreshToken( + string refreshToken, + string appKey, + string? appSecret, + global::Dropbox.Api.DropboxClientConfig? config) + { + if (!string.IsNullOrWhiteSpace(appSecret)) + { + return config == null + ? new global::Dropbox.Api.DropboxClient(refreshToken, appKey, appSecret) + : new global::Dropbox.Api.DropboxClient(refreshToken, appKey, appSecret, config); + } + + return config == null + ? 
new global::Dropbox.Api.DropboxClient(refreshToken, appKey) + : new global::Dropbox.Api.DropboxClient(refreshToken, appKey, config); + } + protected override async Task CreateContainerInternalAsync(CancellationToken cancellationToken = default) { try diff --git a/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs b/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs index d464877..e0f4eae 100644 --- a/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs +++ b/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs @@ -32,6 +32,11 @@ public IStorageOptions GetDefaultOptions() RootPath = defaultOptions.RootPath, DropboxClient = defaultOptions.DropboxClient, Client = defaultOptions.Client, + AccessToken = defaultOptions.AccessToken, + RefreshToken = defaultOptions.RefreshToken, + AppKey = defaultOptions.AppKey, + AppSecret = defaultOptions.AppSecret, + DropboxClientConfig = defaultOptions.DropboxClientConfig, CreateContainerIfNotExists = defaultOptions.CreateContainerIfNotExists }; } diff --git a/Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs index 476b077..4ee7c92 100644 --- a/Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs +++ b/Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs @@ -85,9 +85,17 @@ public static IServiceCollection AddDropboxStorageAsDefault(this IServiceCollect private static void CheckConfiguration(DropboxStorageOptions options) { - if (options.Client == null && options.DropboxClient == null) + if (!string.IsNullOrWhiteSpace(options.RefreshToken) && string.IsNullOrWhiteSpace(options.AppKey)) { - throw new BadConfigurationException("Dropbox storage requires either a configured DropboxClient or a custom IDropboxClientWrapper."); + throw new BadConfigurationException("Dropbox storage configuration with a refresh token requires AppKey (and 
optionally AppSecret)."); + } + + if (options.Client == null + && options.DropboxClient == null + && string.IsNullOrWhiteSpace(options.AccessToken) + && string.IsNullOrWhiteSpace(options.RefreshToken)) + { + throw new BadConfigurationException("Dropbox storage requires either a configured DropboxClient, a custom IDropboxClientWrapper, or credentials (AccessToken / RefreshToken + AppKey)."); } } } diff --git a/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs b/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs index 0a504e7..1ea5b20 100644 --- a/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs +++ b/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs @@ -10,6 +10,28 @@ public class DropboxStorageOptions : IStorageOptions public DropboxClient? DropboxClient { get; set; } + /// + /// OAuth2 access token (short-lived or long-lived) used to create a when is not provided. + /// + public string? AccessToken { get; set; } + + /// + /// OAuth2 refresh token used to create a when is not provided. + /// + public string? RefreshToken { get; set; } + + /// + /// Dropbox app key (required when using ). + /// + public string? AppKey { get; set; } + + /// + /// Dropbox app secret (optional when using PKCE refresh tokens). + /// + public string? AppSecret { get; set; } + + public DropboxClientConfig? 
DropboxClientConfig { get; set; } + public string RootPath { get; set; } = string.Empty; public bool CreateContainerIfNotExists { get; set; } = true; diff --git a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs index fc6c1de..d9af1eb 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs @@ -25,11 +25,12 @@ public async Task UploadAsync_AsText_WithoutOptions() // Act var result = await Storage.UploadAsync(uploadContent); - var downloadedResult = await Storage.DownloadAsync(result.Value!.Name); // Assert result.IsSuccess .ShouldBeTrue(); + + var downloadedResult = await Storage.DownloadAsync(result.Value!.Name); downloadedResult.IsSuccess .ShouldBeTrue(); } @@ -44,11 +45,12 @@ public async Task UploadAsync_AsStream_WithoutOptions() // Act var result = await Storage.UploadAsync(stream); - var downloadedResult = await Storage.DownloadAsync(result.Value!.Name); // Assert result.IsSuccess .ShouldBeTrue(); + + var downloadedResult = await Storage.DownloadAsync(result.Value!.Name); downloadedResult.IsSuccess .ShouldBeTrue(); } diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs index 53d4561..5984687 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs @@ -68,6 +68,47 @@ public void Dropbox_WhenClientNotConfigured_ShouldThrow() services.AddDropboxStorage(options => options.RootPath = "/apps/demo")); } + [Fact] + public void Dropbox_WhenAccessTokenConfigured_ShouldResolve() + { + var services = new ServiceCollection(); + services.AddDropboxStorage(options => + { + options.RootPath = "/apps/demo"; + 
options.AccessToken = "test-token"; + }); + + using var provider = services.BuildServiceProvider(); + provider.GetRequiredService().ShouldNotBeNull(); + } + + [Fact] + public void Dropbox_WhenRefreshTokenMissingAppKey_ShouldThrow() + { + var services = new ServiceCollection(); + Should.Throw(() => + services.AddDropboxStorage(options => + { + options.RootPath = "/apps/demo"; + options.RefreshToken = "refresh-token"; + })); + } + + [Fact] + public void Dropbox_WhenRefreshTokenConfigured_ShouldResolve() + { + var services = new ServiceCollection(); + services.AddDropboxStorage(options => + { + options.RootPath = "/apps/demo"; + options.RefreshToken = "refresh-token"; + options.AppKey = "app-key"; + }); + + using var provider = services.BuildServiceProvider(); + provider.GetRequiredService().ShouldNotBeNull(); + } + [Fact] public void GoogleDrive_AddAsDefault_ShouldResolveIStorage() { diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs index 07fe94f..879ee84 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs @@ -9,7 +9,9 @@ using System.Threading; using System.Threading.Tasks; using Dropbox.Api; +using ManagedCode.Storage.Dropbox; using ManagedCode.Storage.Dropbox.Clients; +using ManagedCode.Storage.Dropbox.Options; using Shouldly; using Xunit; @@ -24,7 +26,8 @@ public async Task DropboxClientWrapper_WithHttpHandler_RoundTrip() var httpClient = new HttpClient(handler); var config = new DropboxClientConfig("ManagedCode.Storage.Tests") { - HttpClient = httpClient + HttpClient = httpClient, + LongPollHttpClient = httpClient }; using var dropboxClient = new DropboxClient("test-token", config); @@ -59,6 +62,39 @@ public async Task DropboxClientWrapper_WithHttpHandler_RoundTrip() (await 
wrapper.DeleteAsync("/apps/demo", "file.json", CancellationToken.None)).ShouldBeFalse(); } + [Fact] + public async Task DropboxStorage_WithAccessTokenAndHttpHandler_RoundTrip() + { + var handler = new FakeDropboxHttpHandler(); + var httpClient = new HttpClient(handler); + var config = new DropboxClientConfig("ManagedCode.Storage.Tests") + { + HttpClient = httpClient, + LongPollHttpClient = httpClient + }; + + using var storage = new DropboxStorage(new DropboxStorageOptions + { + RootPath = "/apps/demo", + AccessToken = "test-token", + DropboxClientConfig = config, + CreateContainerIfNotExists = true + }); + + (await storage.UploadAsync("dropbox payload", options => options.FileName = "file.json")).IsSuccess.ShouldBeTrue(); + + var exists = await storage.ExistsAsync("file.json"); + exists.IsSuccess.ShouldBeTrue(); + exists.Value.ShouldBeTrue(); + + var download = await storage.DownloadAsync("file.json"); + download.IsSuccess.ShouldBeTrue(); + using (var reader = new StreamReader(download.Value.FileStream, Encoding.UTF8)) + { + (await reader.ReadToEndAsync()).ShouldBe("dropbox payload"); + } + } + private sealed class FakeDropboxHttpHandler : HttpMessageHandler { private const string ApiHost = "api.dropboxapi.com"; From ec2eb57c919356215d1ea67da67b15e3eae8eeb2 Mon Sep 17 00:00:00 2001 From: ksemenenko Date: Mon, 15 Dec 2025 10:47:57 +0100 Subject: [PATCH 5/6] code + docs --- .github/workflows/jekyll-gh-pages.yml | 347 ++++++ AGENTS.md | 2 + .../StorageSignalRClient.cs | 16 +- .../StorageClient.cs | 26 +- .../ControllerDownloadExtensions.cs | 6 +- .../Controller/ControllerUploadExtensions.cs | 144 +-- .../Storage/StorageFromFileExtensions.cs | 2 +- ManagedCode.Storage.Core/BaseStorage.cs | 6 +- .../Constants/MetadataKeys.cs | 22 +- .../Extensions/ServiceCollectionExtensions.cs | 2 +- .../Helpers/PathHelper.cs | 16 +- ManagedCode.Storage.Core/IDownloader.cs | 4 +- .../IStorageOperations.cs | 2 +- ManagedCode.Storage.Core/IUploader.cs | 2 +- 
.../Models/MetadataOptions.cs | 2 +- .../Providers/IStorageFactory.cs | 12 +- .../Providers/IStorageProvider.cs | 8 +- .../Providers/StorageFactory.cs | 12 +- ManagedCode.Storage.Core/Utf8StringStream.cs | 12 +- .../Extensions/ServiceCollectionExtensions.cs | 20 +- .../Implementations/VirtualDirectory.cs | 42 +- .../Implementations/VirtualFile.cs | 46 +- .../Implementations/VirtualFileSystem.cs | 176 +-- .../Options/VfsOptions.cs | 2 +- .../Streaming/VfsWriteStream.cs | 6 +- README.md | 160 ++- .../ManagedCode.Storage.Aws/AWSStorage.cs | 22 +- .../AWSStorageProvider.cs | 8 +- .../ManagedCode.Storage.Aws/BlobStream.cs | 20 +- .../Extensions/ServiceCollectionExtensions.cs | 4 +- .../Extensions/StorageFactoryExtensions.cs | 12 +- .../Options/AWSStorageOptions.cs | 2 +- .../AzureDataLakeStorage.cs | 10 +- .../AzureDataLakeStorageProvider.cs | 8 +- .../Extensions/ServiceCollectionExtensions.cs | 6 +- .../Extensions/StorageFactoryExtensions.cs | 12 +- .../ManagedCode.Storage.Azure/AzureStorage.cs | 12 +- .../AzureStorageProvider.cs | 8 +- .../Extensions/ServiceCollectionExtensions.cs | 4 +- .../Extensions/StorageFactoryExtensions.cs | 12 +- .../Clients/CloudKitClient.cs | 60 +- .../Options/CloudKitStorageOptions.cs | 4 +- .../Extensions/ServiceCollectionExtensions.cs | 10 +- .../Extensions/StorageFactoryExtensions.cs | 10 +- .../FileSystemStorage.cs | 2 +- .../FileSystemStorageProvider.cs | 8 +- .../Extensions/ServiceCollectionExtensions.cs | 4 +- .../Extensions/StorageFactoryExtensions.cs | 12 +- .../ManagedCode.Storage.Google/GCPStorage.cs | 34 +- .../GCPStorageProvider.cs | 10 +- .../Common/EmptyContainer.cs | 4 +- .../Common/FileHelper.cs | 6 +- .../Common/StorageTestApplication.cs | 4 +- .../Common/TestApp/HttpHostProgram.cs | 2 +- .../Constants/ApiEndpoints.cs | 2 +- .../Core/StringStreamTests.cs | 52 +- .../FormFileExtensionsTests.cs | 2 +- .../ExtensionsTests/StorageExtensionsTests.cs | 20 +- .../StoragePrivderExtensionsTests.cs | 20 +- 
.../Storages/Abstracts/ContainerTests.cs | 2 +- .../Storages/Abstracts/UploadTests.cs | 8 +- .../CloudKit/CloudKitClientHttpTests.cs | 34 + .../CloudKit/FakeCloudKitHttpHandler.cs | 103 +- .../FileSystem/FileSystemUploadTests.cs | 4 +- .../Storages/GCS/GCSUploadTests.cs | 1 + docs/ADR/0001-icloud-drive-support.md | 73 ++ docs/ADR/index.md | 20 + docs/API/index.md | 14 + docs/API/storage-server.md | 155 +++ docs/Development/credentials.md | 165 +++ docs/Development/seo-audit-github-pages.md | 94 ++ docs/Development/setup.md | 46 + docs/Features/chunked-uploads.md | 66 ++ docs/Features/dependency-injection.md | 75 ++ docs/Features/index.md | 42 + docs/Features/integration-aspnet-server.md | 82 ++ docs/Features/integration-dotnet-client.md | 59 + docs/Features/integration-signalr-client.md | 40 + docs/Features/mime-and-crc.md | 36 + docs/Features/provider-aws-s3.md | 57 + docs/Features/provider-azure-blob.md | 62 + docs/Features/provider-azure-datalake.md | 51 + docs/Features/provider-cloudkit.md | 75 ++ docs/Features/provider-dropbox.md | 88 ++ docs/Features/provider-filesystem.md | 56 + .../Features/provider-google-cloud-storage.md | 60 + docs/Features/provider-googledrive.md | 80 ++ docs/Features/provider-onedrive.md | 77 ++ docs/Features/provider-sftp.md | 56 + docs/Features/storage-core.md | 67 ++ docs/Features/testfakes.md | 37 + docs/Features/virtual-file-system.md | 90 ++ docs/Testing/strategy.md | 62 + docs/server-streaming-plan.md | 35 +- docs/templates/ADR-Template.md | 42 + docs/templates/Feature-Template.md | 53 + github-pages/404.html | 16 + github-pages/_config.yml | 27 + github-pages/_layouts/default.html | 222 ++++ github-pages/adr/index.md | 13 + github-pages/api/index.md | 8 + github-pages/assets/css/style.css | 1049 +++++++++++++++++ github-pages/assets/images/favicon.svg | 10 + github-pages/assets/images/og-image.png | Bin 0 -> 10830 bytes github-pages/credentials.md | 9 + github-pages/features/index.md | 8 + github-pages/index.md | 10 + 
github-pages/robots.txt | 7 + github-pages/setup.md | 9 + github-pages/sitemap.xml | 15 + github-pages/templates.md | 8 + github-pages/testing.md | 8 + 112 files changed, 4551 insertions(+), 536 deletions(-) create mode 100644 .github/workflows/jekyll-gh-pages.yml create mode 100644 docs/ADR/0001-icloud-drive-support.md create mode 100644 docs/ADR/index.md create mode 100644 docs/API/index.md create mode 100644 docs/API/storage-server.md create mode 100644 docs/Development/credentials.md create mode 100644 docs/Development/seo-audit-github-pages.md create mode 100644 docs/Development/setup.md create mode 100644 docs/Features/chunked-uploads.md create mode 100644 docs/Features/dependency-injection.md create mode 100644 docs/Features/index.md create mode 100644 docs/Features/integration-aspnet-server.md create mode 100644 docs/Features/integration-dotnet-client.md create mode 100644 docs/Features/integration-signalr-client.md create mode 100644 docs/Features/mime-and-crc.md create mode 100644 docs/Features/provider-aws-s3.md create mode 100644 docs/Features/provider-azure-blob.md create mode 100644 docs/Features/provider-azure-datalake.md create mode 100644 docs/Features/provider-cloudkit.md create mode 100644 docs/Features/provider-dropbox.md create mode 100644 docs/Features/provider-filesystem.md create mode 100644 docs/Features/provider-google-cloud-storage.md create mode 100644 docs/Features/provider-googledrive.md create mode 100644 docs/Features/provider-onedrive.md create mode 100644 docs/Features/provider-sftp.md create mode 100644 docs/Features/storage-core.md create mode 100644 docs/Features/testfakes.md create mode 100644 docs/Features/virtual-file-system.md create mode 100644 docs/Testing/strategy.md create mode 100644 docs/templates/ADR-Template.md create mode 100644 docs/templates/Feature-Template.md create mode 100644 github-pages/404.html create mode 100644 github-pages/_config.yml create mode 100644 github-pages/_layouts/default.html create mode 
100644 github-pages/adr/index.md create mode 100644 github-pages/api/index.md create mode 100644 github-pages/assets/css/style.css create mode 100644 github-pages/assets/images/favicon.svg create mode 100644 github-pages/assets/images/og-image.png create mode 100644 github-pages/credentials.md create mode 100644 github-pages/features/index.md create mode 100644 github-pages/index.md create mode 100644 github-pages/robots.txt create mode 100644 github-pages/setup.md create mode 100644 github-pages/sitemap.xml create mode 100644 github-pages/templates.md create mode 100644 github-pages/testing.md diff --git a/.github/workflows/jekyll-gh-pages.yml b/.github/workflows/jekyll-gh-pages.yml new file mode 100644 index 0000000..31a469c --- /dev/null +++ b/.github/workflows/jekyll-gh-pages.yml @@ -0,0 +1,347 @@ +name: Docs (GitHub Pages) + +on: + push: + branches: [ main ] + paths: + - README.md + - docs/** + - github-pages/** + - .github/workflows/jekyll-gh-pages.yml + workflow_dispatch: + +permissions: + contents: read + pages: write + id-token: write + +concurrency: + group: pages + cancel-in-progress: false + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v5 + + - name: Add version and build date + run: | + echo "version: \"${{ github.run_number }}\"" >> github-pages/_config.yml + echo "build_date: \"$(date +'%Y-%m-%d')\"" >> github-pages/_config.yml + + - name: Generate site pages from docs + run: | + set -euo pipefail + + rewrite_md_links() { + # Rewrite *.md links to *.html for the rendered site (keep source docs GitHub-friendly). + # Only rewrites relative links (no ':' in the target) so external URLs are untouched. + sed -E \ + -e 's|\]\(README\.md\)|](/)|g' \ + -e 's|\]\(([^):]+)\.md(#[^)]*)?\)|](\1.html\2)|g' + } + + extract_description() { + # Grab the first meaningful paragraph (skips headings, code fences, and bullet lists). 
+ awk ' + BEGIN { in_code=0 } + /^```/ { in_code = !in_code; next } + in_code { next } + NR == 1 { next } # skip H1 + /^[[:space:]]*$/ { next } # skip blanks + /^#/ { next } # skip headings + /^(Accepted|Proposed|Rejected|Superseded|Deprecated)[[:space:]]/ { next } # skip ADR status line + /^[[:space:]]*[-*]/ { next } # skip bullets + { print; exit } + ' + } + + keywords_for_feature() { + case "$1" in + storage-core.md) echo "IStorage, ManagedCode.Storage.Core, provider-agnostic storage, upload, download, streaming, metadata, Result, .NET" ;; + dependency-injection.md) echo "dependency injection, keyed services, IServiceCollection, IStorage, multi-tenant storage, .NET" ;; + virtual-file-system.md) echo "virtual file system, IVirtualFileSystem, overlay, metadata cache, ManagedCode.Storage.VirtualFileSystem, .NET" ;; + mime-and-crc.md) echo "MimeHelper, content-type, CRC32, integrity, ManagedCode.MimeTypes, ManagedCode.Storage.Core" ;; + testfakes.md) echo "test fakes, provider doubles, Testcontainers, integration tests, ManagedCode.Storage.TestFakes" ;; + integration-aspnet-server.md) echo "ASP.NET storage controller, streaming upload, ranged download, SignalR hub, ManagedCode.Storage.Server" ;; + integration-dotnet-client.md) echo ".NET HTTP client, StorageClient, upload, download, chunked upload, CRC32, ManagedCode.Storage.Client" ;; + integration-signalr-client.md) echo "SignalR client, streaming, upload, download, progress, StorageSignalRClient, ManagedCode.Storage.Client.SignalR" ;; + chunked-uploads.md) echo "chunked uploads, resumable upload, CRC32, ASP.NET, ManagedCode.Storage.Server, ManagedCode.Storage.Client" ;; + provider-azure-blob.md) echo "Azure Blob Storage, ManagedCode.Storage.Azure, IStorage, BlobClient, container, streaming upload, download, .NET" ;; + provider-azure-datalake.md) echo "Azure Data Lake Gen2, ADLS, ManagedCode.Storage.Azure.DataLake, IStorage, filesystem, directory, .NET" ;; + provider-aws-s3.md) echo "Amazon S3, AWS S3, 
ManagedCode.Storage.Aws, IStorage, bucket, streaming upload, Object Lock, legal hold, .NET" ;; + provider-google-cloud-storage.md) echo "Google Cloud Storage, GCS, ManagedCode.Storage.Gcp, StorageClient, IStorage, bucket, streaming, .NET" ;; + provider-filesystem.md) echo "file system storage, local development, ManagedCode.Storage.FileSystem, IStorage, tests, .NET" ;; + provider-sftp.md) echo "SFTP storage, SSH.NET, ManagedCode.Storage.Sftp, IStorage, upload, download, .NET" ;; + provider-onedrive.md) echo "OneDrive, Microsoft Graph, GraphServiceClient, Entra ID, OAuth, ManagedCode.Storage.OneDrive, IStorage, .NET" ;; + provider-googledrive.md) echo "Google Drive API, DriveService, OAuth, service account, ManagedCode.Storage.GoogleDrive, IStorage, .NET" ;; + provider-dropbox.md) echo "Dropbox API, DropboxClient, OAuth2, refresh token, ManagedCode.Storage.Dropbox, IStorage, .NET" ;; + provider-cloudkit.md) echo "CloudKit Web Services, iCloud app data, ckAPIToken, ckWebAuthToken, ManagedCode.Storage.CloudKit, IStorage, .NET" ;; + *) echo "ManagedCode.Storage, IStorage, .NET, storage" ;; + esac + } + + keywords_for_api() { + case "$1" in + storage-server.md) echo "storage API, ASP.NET controllers, SignalR hub, upload, download, streaming, chunked upload, ranged download, ManagedCode.Storage.Server" ;; + *) echo "ManagedCode.Storage API, HTTP, SignalR, ASP.NET, streaming" ;; + esac + } + + keywords_for_adr() { + case "$1" in + 0001-icloud-drive-support.md) echo "iCloud Drive, CloudKit, Apple, server-side storage, provider design, ManagedCode.Storage.CloudKit, ADR" ;; + *) echo "architecture decision record, ADR, ManagedCode.Storage" ;; + esac + } + + mkdir -p github-pages/features + + WORDS=$(wc -w < README.md) + MINUTES=$(( (WORDS + 200) / 200 )) + + cat > github-pages/index.md << 'EOF' + --- + layout: default + title: Home + description: ManagedCode.Storage documentation: cross-provider storage toolkit for .NET and ASP.NET streaming scenarios. 
+ keywords: ManagedCode.Storage, IStorage, .NET, ASP.NET, SignalR, Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, OneDrive, Google Drive, Dropbox, CloudKit, SFTP, chunked uploads, streaming uploads + is_home: true + nav_order: 1 + --- + EOF + sed -i 's/^ //' github-pages/index.md + cat README.md >> github-pages/index.md + echo "" >> github-pages/index.md + echo "

${MINUTES} min read

" >> github-pages/index.md + + cat > github-pages/setup.md << 'EOF' + --- + layout: default + title: Setup + description: How to clone, build, and run tests for ManagedCode.Storage. + keywords: ManagedCode.Storage setup, .NET 10, dotnet restore, dotnet build, dotnet test, Docker, Testcontainers, Azurite, LocalStack, FakeGcsServer, SFTP + nav_order: 2 + --- + EOF + sed -i 's/^ //' github-pages/setup.md + rewrite_md_links < docs/Development/setup.md >> github-pages/setup.md + + cat > github-pages/credentials.md << 'EOF' + --- + layout: default + title: Credentials + description: How to obtain credentials for OneDrive, Google Drive, Dropbox, and CloudKit. + keywords: OneDrive credentials, Microsoft Graph auth, Entra ID, Google Drive OAuth, Drive API, service account, Dropbox OAuth2, refresh token, CloudKit ckAPIToken, ckWebAuthToken + nav_order: 3 + --- + EOF + sed -i 's/^ //' github-pages/credentials.md + rewrite_md_links < docs/Development/credentials.md >> github-pages/credentials.md + + cat > github-pages/testing.md << 'EOF' + --- + layout: default + title: Testing + description: Test strategy and how to run the ManagedCode.Storage test suite. + keywords: ManagedCode.Storage tests, xUnit, Shouldly, integration tests, Testcontainers, Azurite, LocalStack, FakeGcsServer, HttpMessageHandler fakes + nav_order: 4 + --- + EOF + sed -i 's/^ //' github-pages/testing.md + rewrite_md_links < docs/Testing/strategy.md >> github-pages/testing.md + + cat > github-pages/features/index.md << 'EOF' + --- + layout: default + title: Features + description: Documentation for major modules and providers in ManagedCode.Storage. 
+ keywords: IStorage, providers, Azure Blob, AWS S3, Google Cloud Storage, OneDrive, Google Drive, Dropbox, CloudKit, FileSystem, SFTP, Virtual File System, ASP.NET Server, SignalR + nav_order: 5 + --- + EOF + sed -i 's/^ //' github-pages/features/index.md + rewrite_md_links < docs/Features/index.md >> github-pages/features/index.md + + for file in docs/Features/*.md; do + base=$(basename "$file") + if [ "$base" = "index.md" ]; then + continue + fi + + title=$(head -n 1 "$file" | sed 's/^# //') + title_escaped=$(printf '%s' "$title" | sed 's/"/\\"/g') + + desc=$(extract_description < "$file" | tr -d '\r\n') + desc=$(printf '%s' "$desc" | sed -E 's/[[:space:]]+/ /g; s/[[:space:]]+$//; s/:$//') + if [ ${#desc} -gt 160 ]; then + desc="${desc:0:159}…" + fi + if [ -z "${desc}" ]; then + desc="${title} documentation." + fi + desc_escaped=$(printf '%s' "$desc" | sed 's/"/\\"/g') + + keywords=$(keywords_for_feature "$base") + keywords_escaped=$(printf '%s' "$keywords" | sed 's/"/\\"/g') + cat > "github-pages/features/$base" << EOF + --- + layout: default + title: "${title_escaped}" + description: "${desc_escaped}" + keywords: "${keywords_escaped}" + --- + EOF + sed -i 's/^ //' "github-pages/features/$base" + rewrite_md_links < "$file" >> "github-pages/features/$base" + done + + mkdir -p github-pages/adr + + cat > github-pages/adr/index.md << 'EOF' + --- + layout: default + title: ADR + description: Architecture Decision Records (ADR) for ManagedCode.Storage. 
+ keywords: architecture decisions, ADR, ManagedCode.Storage, design decisions, providers, CloudKit, iCloud Drive + nav_order: 6 + --- + EOF + sed -i 's/^ //' github-pages/adr/index.md + rewrite_md_links < docs/ADR/index.md >> github-pages/adr/index.md + + for file in docs/ADR/*.md; do + base=$(basename "$file") + if [ "$base" = "index.md" ]; then + continue + fi + + title=$(head -n 1 "$file" | sed 's/^# //') + title_escaped=$(printf '%s' "$title" | sed 's/"/\\"/g') + + desc=$(extract_description < "$file" | tr -d '\r\n') + desc=$(printf '%s' "$desc" | sed -E 's/[[:space:]]+/ /g; s/[[:space:]]+$//; s/:$//') + if [ ${#desc} -gt 160 ]; then + desc="${desc:0:159}…" + fi + if [ -z "${desc}" ]; then + desc="${title} decision record." + fi + desc_escaped=$(printf '%s' "$desc" | sed 's/"/\\"/g') + + keywords=$(keywords_for_adr "$base") + keywords_escaped=$(printf '%s' "$keywords" | sed 's/"/\\"/g') + cat > "github-pages/adr/$base" << EOF + --- + layout: default + title: "${title_escaped}" + description: "${desc_escaped}" + keywords: "${keywords_escaped}" + --- + EOF + sed -i 's/^ //' "github-pages/adr/$base" + rewrite_md_links < "$file" >> "github-pages/adr/$base" + done + + mkdir -p github-pages/api + + cat > github-pages/api/index.md << 'EOF' + --- + layout: default + title: API + description: HTTP and SignalR API documentation for ManagedCode.Storage.Server. 
+ keywords: storage API, HTTP, SignalR, ASP.NET controllers, upload, download, streaming, chunked uploads, ranged downloads, ManagedCode.Storage.Server + nav_order: 7 + --- + EOF + sed -i 's/^ //' github-pages/api/index.md + rewrite_md_links < docs/API/index.md >> github-pages/api/index.md + + for file in docs/API/*.md; do + base=$(basename "$file") + if [ "$base" = "index.md" ]; then + continue + fi + + title=$(head -n 1 "$file" | sed 's/^# //') + title_escaped=$(printf '%s' "$title" | sed 's/"/\\"/g') + + desc=$(extract_description < "$file" | tr -d '\r\n') + desc=$(printf '%s' "$desc" | sed -E 's/[[:space:]]+/ /g; s/[[:space:]]+$//; s/:$//') + if [ ${#desc} -gt 160 ]; then + desc="${desc:0:159}…" + fi + if [ -z "${desc}" ]; then + desc="${title} documentation." + fi + desc_escaped=$(printf '%s' "$desc" | sed 's/"/\\"/g') + + keywords=$(keywords_for_api "$base") + keywords_escaped=$(printf '%s' "$keywords" | sed 's/"/\\"/g') + cat > "github-pages/api/$base" << EOF + --- + layout: default + title: "${title_escaped}" + description: "${desc_escaped}" + keywords: "${keywords_escaped}" + --- + EOF + sed -i 's/^ //' "github-pages/api/$base" + rewrite_md_links < "$file" >> "github-pages/api/$base" + done + + cat > github-pages/templates.md << 'EOF' + --- + layout: default + title: Templates + description: Documentation templates used in this repository (Feature and ADR templates). + keywords: documentation templates, feature template, ADR template, MCAF, ManagedCode.Storage docs + nav_order: 8 + --- + + # Templates + + These templates are used to keep documentation consistent and MCAF-friendly. + +
+ EOF + sed -i 's/^ //' github-pages/templates.md + + for file in docs/templates/*.md; do + if [ -f "$file" ]; then + filename=$(basename "$file") + name="${filename%.md}" + + echo "
" >> github-pages/templates.md + echo "${name}" >> github-pages/templates.md + echo "
" >> github-pages/templates.md + echo "View" >> github-pages/templates.md + echo "Download" >> github-pages/templates.md + echo "
" >> github-pages/templates.md + echo "
" >> github-pages/templates.md + fi + done + + echo "
" >> github-pages/templates.md + + - name: Setup Pages + uses: actions/configure-pages@v5 + + - name: Build with Jekyll + uses: actions/jekyll-build-pages@v1 + with: + source: ./github-pages + destination: ./_site + + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + + deploy: + needs: build + runs-on: ubuntu-latest + steps: + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/AGENTS.md b/AGENTS.md index 41fc275..fd8304a 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -82,8 +82,10 @@ If no new rule is detected → do not update the file. - Docs live in `docs/` and `README.md` - Keep a GitHub Pages docs site in sync with `docs/`, using `DOCS-EXAMPLE/` as the reference template for structure and CI/pipeline +- When adding new docs pages under `docs/Features/`, `docs/ADR/`, or `docs/API/`, also update the corresponding `index.md` to link the page so it’s discoverable in the docs catalog/navigation (the site generator will still publish the page even without the link) - Update docs when behaviour changes - Update configuration examples when required +- Documentation must include clear schemas/diagrams (prefer Mermaid) for every non-trivial feature and integration so GitHub users can understand flows quickly - When adding new projects/providers, ensure `README.md` clearly documents installation, DI wiring, and basic usage examples - Where feasible, prefer provider options that can build vendor SDK clients from credentials (to reduce consumer boilerplate) while still allowing client injection for advanced scenarios - Avoid "ownership flags" like `ownsClient`; prefer a clear swap point (wrapper/factory) so lifetime and disposal rules stay simple and predictable diff --git a/Integraions/ManagedCode.Storage.Client.SignalR/StorageSignalRClient.cs b/Integraions/ManagedCode.Storage.Client.SignalR/StorageSignalRClient.cs index 70da271..7a36942 100644 --- a/Integraions/ManagedCode.Storage.Client.SignalR/StorageSignalRClient.cs 
+++ b/Integraions/ManagedCode.Storage.Client.SignalR/StorageSignalRClient.cs @@ -81,15 +81,15 @@ public async Task ConnectAsync(StorageSignalRClientOptions options, Cancellation RegisterHubHandlers(_connection); - if (options.KeepAliveInterval.HasValue) - { - _connection.KeepAliveInterval = options.KeepAliveInterval.Value; - } + if (options.KeepAliveInterval.HasValue) + { + _connection.KeepAliveInterval = options.KeepAliveInterval.Value; + } - if (options.ServerTimeout.HasValue) - { - _connection.ServerTimeout = options.ServerTimeout.Value; - } + if (options.ServerTimeout.HasValue) + { + _connection.ServerTimeout = options.ServerTimeout.Value; + } await _connection.StartAsync(cancellationToken).ConfigureAwait(false); } diff --git a/Integraions/ManagedCode.Storage.Client/StorageClient.cs b/Integraions/ManagedCode.Storage.Client/StorageClient.cs index cfd8fe8..caf35f2 100644 --- a/Integraions/ManagedCode.Storage.Client/StorageClient.cs +++ b/Integraions/ManagedCode.Storage.Client/StorageClient.cs @@ -265,23 +265,23 @@ public async Task> UploadLargeFile(Stream file, string uploadApiUrl checksum = valueElement.GetUInt32(); break; case JsonValueKind.Object: - { - try { - var dto = JsonSerializer.Deserialize(valueElement.GetRawText()); - if (dto == null) + try { - return Result.Fail("Chunk upload completion response is empty"); + var dto = JsonSerializer.Deserialize(valueElement.GetRawText()); + if (dto == null) + { + return Result.Fail("Chunk upload completion response is empty"); + } + + checksum = dto.Checksum; + break; + } + catch (JsonException ex) + { + return Result.Fail(ex); } - - checksum = dto.Checksum; - break; - } - catch (JsonException ex) - { - return Result.Fail(ex); } - } case JsonValueKind.String when uint.TryParse(valueElement.GetString(), out var parsed): checksum = parsed; break; diff --git a/Integraions/ManagedCode.Storage.Server/Extensions/Controller/ControllerDownloadExtensions.cs 
b/Integraions/ManagedCode.Storage.Server/Extensions/Controller/ControllerDownloadExtensions.cs index 93f23bc..064f63c 100644 --- a/Integraions/ManagedCode.Storage.Server/Extensions/Controller/ControllerDownloadExtensions.cs +++ b/Integraions/ManagedCode.Storage.Server/Extensions/Controller/ControllerDownloadExtensions.cs @@ -27,10 +27,10 @@ public static async Task DownloadAsStreamAsync( var result = await storage.GetStreamAsync(blobName, cancellationToken); if (result.IsFailed) throw new FileNotFoundException(blobName); - + return Results.Stream(result.Value, MimeHelper.GetMimeType(blobName), blobName, enableRangeProcessing: enableRangeProcessing); } - + /// /// Downloads the specified blob as a . /// @@ -44,7 +44,7 @@ public static async Task DownloadAsFileResultAsync( var result = await storage.GetStreamAsync(blobName, cancellationToken); if (result.IsFailed) throw new FileNotFoundException(blobName); - + return new FileStreamResult(result.Value, MimeHelper.GetMimeType(blobName)) { FileDownloadName = blobName, diff --git a/Integraions/ManagedCode.Storage.Server/Extensions/Controller/ControllerUploadExtensions.cs b/Integraions/ManagedCode.Storage.Server/Extensions/Controller/ControllerUploadExtensions.cs index 500858a..719546a 100644 --- a/Integraions/ManagedCode.Storage.Server/Extensions/Controller/ControllerUploadExtensions.cs +++ b/Integraions/ManagedCode.Storage.Server/Extensions/Controller/ControllerUploadExtensions.cs @@ -30,17 +30,17 @@ private static StorageServerOptions ResolveServerOptions(ControllerBase controll return services?.GetService() ?? new StorageServerOptions(); } - /// - /// Uploads a form file to storage and returns blob metadata. - /// - public static async Task UploadFormFileAsync( - this ControllerBase controller, - IStorage storage, - IFormFile file, - UploadOptions? 
uploadOptions = null, - CancellationToken cancellationToken = default) -{ - uploadOptions ??= new UploadOptions(file.FileName, mimeType: file.ContentType); + /// + /// Uploads a form file to storage and returns blob metadata. + /// + public static async Task UploadFormFileAsync( + this ControllerBase controller, + IStorage storage, + IFormFile file, + UploadOptions? uploadOptions = null, + CancellationToken cancellationToken = default) + { + uploadOptions ??= new UploadOptions(file.FileName, mimeType: file.ContentType); var serverOptions = ResolveServerOptions(controller); if (file.Length > serverOptions.InMemoryUploadThresholdBytes) @@ -55,35 +55,35 @@ public static async Task UploadFormFileAsync( var uploadResult = await storage.UploadAsync(stream, uploadOptions, cancellationToken); uploadResult.ThrowIfFail(); return uploadResult.Value!; -} + } -/// -/// Uploads a browser file (Blazor) to storage. -/// -public static async Task UploadFromBrowserFileAsync( - this ControllerBase controller, - IStorage storage, - IBrowserFile file, - UploadOptions? uploadOptions = null, - CancellationToken cancellationToken = default) -{ - uploadOptions ??= new UploadOptions(file.Name, mimeType: file.ContentType); + /// + /// Uploads a browser file (Blazor) to storage. + /// + public static async Task UploadFromBrowserFileAsync( + this ControllerBase controller, + IStorage storage, + IBrowserFile file, + UploadOptions? 
uploadOptions = null, + CancellationToken cancellationToken = default) + { + uploadOptions ??= new UploadOptions(file.Name, mimeType: file.ContentType); - var serverOptions = ResolveServerOptions(controller); + var serverOptions = ResolveServerOptions(controller); - if (file.Size > serverOptions.InMemoryUploadThresholdBytes) - { - var localFile = await file.ToLocalFileAsync(cancellationToken); - var result = await storage.UploadAsync(localFile.FileInfo, uploadOptions, cancellationToken); - result.ThrowIfFail(); - return result.Value!; - } + if (file.Size > serverOptions.InMemoryUploadThresholdBytes) + { + var localFile = await file.ToLocalFileAsync(cancellationToken); + var result = await storage.UploadAsync(localFile.FileInfo, uploadOptions, cancellationToken); + result.ThrowIfFail(); + return result.Value!; + } - await using var stream = file.OpenReadStream(); - var uploadResult = await storage.UploadAsync(stream, uploadOptions, cancellationToken); - uploadResult.ThrowIfFail(); - return uploadResult.Value!; -} + await using var stream = file.OpenReadStream(); + var uploadResult = await storage.UploadAsync(stream, uploadOptions, cancellationToken); + uploadResult.ThrowIfFail(); + return uploadResult.Value!; + } /// /// Appends a chunk to the current upload session. @@ -121,52 +121,52 @@ public static void AbortChunkUpload( chunkUploadService.Abort(uploadId); } -/// -/// Uploads content from the raw request stream. -/// -public static async Task UploadFromStreamAsync( - this ControllerBase controller, - IStorage storage, - HttpRequest request, - UploadOptions? uploadOptions = null, - CancellationToken cancellationToken = default) -{ - if (!StreamHelper.IsMultipartContentType(request.ContentType)) + /// + /// Uploads content from the raw request stream. + /// + public static async Task UploadFromStreamAsync( + this ControllerBase controller, + IStorage storage, + HttpRequest request, + UploadOptions? 
uploadOptions = null, + CancellationToken cancellationToken = default) { - throw new InvalidOperationException("Not a multipart request"); - } + if (!StreamHelper.IsMultipartContentType(request.ContentType)) + { + throw new InvalidOperationException("Not a multipart request"); + } - var serverOptions = ResolveServerOptions(controller); + var serverOptions = ResolveServerOptions(controller); - var boundary = StreamHelper.GetBoundary( - MediaTypeHeaderValue.Parse(request.ContentType), - serverOptions.MultipartBoundaryLengthLimit); + var boundary = StreamHelper.GetBoundary( + MediaTypeHeaderValue.Parse(request.ContentType), + serverOptions.MultipartBoundaryLengthLimit); - var multipartReader = new MultipartReader(boundary, request.Body); - var section = await multipartReader.ReadNextSectionAsync(cancellationToken); + var multipartReader = new MultipartReader(boundary, request.Body); + var section = await multipartReader.ReadNextSectionAsync(cancellationToken); - while (section != null) - { - if (ContentDispositionHeaderValue.TryParse(section.ContentDisposition, out var contentDisposition) - && StreamHelper.HasFileContentDisposition(contentDisposition)) + while (section != null) { - var fileName = contentDisposition.FileName.Value; - var contentType = section.ContentType; + if (ContentDispositionHeaderValue.TryParse(section.ContentDisposition, out var contentDisposition) + && StreamHelper.HasFileContentDisposition(contentDisposition)) + { + var fileName = contentDisposition.FileName.Value; + var contentType = section.ContentType; - uploadOptions ??= new UploadOptions(fileName, mimeType: contentType); + uploadOptions ??= new UploadOptions(fileName, mimeType: contentType); - using var memoryStream = new MemoryStream(); - await section.Body.CopyToAsync(memoryStream, cancellationToken); - memoryStream.Position = 0; + using var memoryStream = new MemoryStream(); + await section.Body.CopyToAsync(memoryStream, cancellationToken); + memoryStream.Position = 0; - var result = 
await storage.UploadAsync(memoryStream, uploadOptions, cancellationToken); - result.ThrowIfFail(); - return result.Value!; + var result = await storage.UploadAsync(memoryStream, uploadOptions, cancellationToken); + result.ThrowIfFail(); + return result.Value!; + } + + section = await multipartReader.ReadNextSectionAsync(cancellationToken); } - section = await multipartReader.ReadNextSectionAsync(cancellationToken); + throw new InvalidOperationException("No file found in request"); } - - throw new InvalidOperationException("No file found in request"); -} } diff --git a/Integraions/ManagedCode.Storage.Server/Extensions/Storage/StorageFromFileExtensions.cs b/Integraions/ManagedCode.Storage.Server/Extensions/Storage/StorageFromFileExtensions.cs index dff0103..38b692e 100644 --- a/Integraions/ManagedCode.Storage.Server/Extensions/Storage/StorageFromFileExtensions.cs +++ b/Integraions/ManagedCode.Storage.Server/Extensions/Storage/StorageFromFileExtensions.cs @@ -27,7 +27,7 @@ public static async Task> UploadToStorageAsync(this IStorag { var newOptions = new UploadOptions(formFile.FileName, mimeType: formFile.ContentType); options.Invoke(newOptions); - + await using var stream = formFile.OpenReadStream(); return await storage.UploadAsync(stream, newOptions, cancellationToken); } diff --git a/ManagedCode.Storage.Core/BaseStorage.cs b/ManagedCode.Storage.Core/BaseStorage.cs index 227c779..28be9f1 100644 --- a/ManagedCode.Storage.Core/BaseStorage.cs +++ b/ManagedCode.Storage.Core/BaseStorage.cs @@ -31,7 +31,7 @@ public async Task CreateContainerAsync(CancellationToken cancellationTok { await _semaphoreSlim.WaitAsync(cancellationToken); cancellationToken.ThrowIfCancellationRequested(); - + var result = await CreateContainerInternalAsync(cancellationToken); cancellationToken.ThrowIfCancellationRequested(); IsContainerCreated = result.IsSuccess; @@ -135,7 +135,7 @@ public Task> UploadAsync(FileInfo fileInfo, UploadOptions o { options.FileName = fileInfo.Name; } - + return 
UploadInternalAsync(fileInfo.OpenRead(), SetUploadOptions(options), cancellationToken); } @@ -333,7 +333,7 @@ protected abstract Task SetLegalHoldInternalAsync(bool hasLegalHold, Leg public void Dispose() { - if(StorageClient is IDisposable disposable) + if (StorageClient is IDisposable disposable) disposable.Dispose(); } } \ No newline at end of file diff --git a/ManagedCode.Storage.Core/Constants/MetadataKeys.cs b/ManagedCode.Storage.Core/Constants/MetadataKeys.cs index 78f932a..65eef90 100644 --- a/ManagedCode.Storage.Core/Constants/MetadataKeys.cs +++ b/ManagedCode.Storage.Core/Constants/MetadataKeys.cs @@ -13,13 +13,13 @@ public static class MetadataKeys public const string LastAccessed = "last_accessed"; public const string Created = "created"; public const string Modified = "modified"; - + // FTP specific public const string FtpRawPermissions = "ftp_raw_permissions"; public const string FtpFileType = "ftp_file_type"; public const string FtpSize = "ftp_size"; public const string FtpModifyTime = "ftp_modify_time"; - + // Cloud storage metadata public const string ContentEncoding = "content_encoding"; public const string ContentLanguage = "content_language"; @@ -27,34 +27,34 @@ public static class MetadataKeys public const string ETag = "etag"; public const string ContentHash = "content_hash"; public const string StorageClass = "storage_class"; - + // Azure specific public const string AzureBlobType = "azure_blob_type"; public const string AzureAccessTier = "azure_access_tier"; public const string AzureServerEncrypted = "azure_server_encrypted"; - + // AWS specific public const string AwsStorageClass = "aws_storage_class"; public const string AwsServerSideEncryption = "aws_server_side_encryption"; public const string AwsVersionId = "aws_version_id"; - + // Google Cloud specific public const string GcsStorageClass = "gcs_storage_class"; public const string GcsGeneration = "gcs_generation"; public const string GcsMetageneration = "gcs_metageneration"; - + // 
Media metadata public const string ImageWidth = "image_width"; public const string ImageHeight = "image_height"; public const string VideoDuration = "video_duration"; public const string AudioBitrate = "audio_bitrate"; - + // Custom application metadata public const string ApplicationName = "app_name"; public const string ApplicationVersion = "app_version"; public const string UserId = "user_id"; public const string SessionId = "session_id"; - + // Processing metadata public const string ProcessingStatus = "processing_status"; public const string ThumbnailGenerated = "thumbnail_generated"; @@ -76,7 +76,7 @@ public static class FileTypes public const string SymbolicLink = "symbolic_link"; public const string Unknown = "unknown"; } - + // Processing statuses public static class ProcessingStatus { @@ -85,14 +85,14 @@ public static class ProcessingStatus public const string Completed = "completed"; public const string Failed = "failed"; } - + // Boolean values public static class Boolean { public const string True = "true"; public const string False = "false"; } - + // Storage classes public static class StorageClasses { diff --git a/ManagedCode.Storage.Core/Extensions/ServiceCollectionExtensions.cs b/ManagedCode.Storage.Core/Extensions/ServiceCollectionExtensions.cs index 2bc2fe6..7e35391 100644 --- a/ManagedCode.Storage.Core/Extensions/ServiceCollectionExtensions.cs +++ b/ManagedCode.Storage.Core/Extensions/ServiceCollectionExtensions.cs @@ -11,5 +11,5 @@ public static IServiceCollection AddStorageFactory(this IServiceCollection servi serviceCollection.TryAddSingleton(); return serviceCollection; } - + } \ No newline at end of file diff --git a/ManagedCode.Storage.Core/Helpers/PathHelper.cs b/ManagedCode.Storage.Core/Helpers/PathHelper.cs index cd7677e..d5baca5 100644 --- a/ManagedCode.Storage.Core/Helpers/PathHelper.cs +++ b/ManagedCode.Storage.Core/Helpers/PathHelper.cs @@ -82,7 +82,7 @@ public static string CombinePaths(char separator, params string[] paths) 
return string.Empty; var result = paths[0] ?? string.Empty; - + for (int i = 1; i < paths.Length; i++) { var path = paths[i]; @@ -91,11 +91,11 @@ public static string CombinePaths(char separator, params string[] paths) // Remove leading separators from current path path = path.TrimStart('/', '\\'); - + // Ensure result doesn't end with separator (unless it's root) if (result.Length > 0 && result[^1] != separator) result += separator; - + result += path; } @@ -147,10 +147,10 @@ public static string EnsureAbsolutePath(string? path, char separator = '/') return separator.ToString(); var normalizedPath = NormalizePath(path, separator); - + if (normalizedPath[0] != separator) normalizedPath = separator + normalizedPath; - + return normalizedPath; } @@ -200,9 +200,9 @@ public static string GetFileName(string? path) var normalizedPath = NormalizePath(path); var lastSeparatorIndex = normalizedPath.LastIndexOfAny(new[] { '/', '\\' }); - - return lastSeparatorIndex >= 0 - ? normalizedPath[(lastSeparatorIndex + 1)..] + + return lastSeparatorIndex >= 0 + ? normalizedPath[(lastSeparatorIndex + 1)..] : normalizedPath; } } \ No newline at end of file diff --git a/ManagedCode.Storage.Core/IDownloader.cs b/ManagedCode.Storage.Core/IDownloader.cs index 1309edf..a5af99a 100644 --- a/ManagedCode.Storage.Core/IDownloader.cs +++ b/ManagedCode.Storage.Core/IDownloader.cs @@ -18,7 +18,7 @@ public interface IDownloader /// A cancellation token that can be used to cancel the operation. /// A task that represents the asynchronous operation. The task result contains the downloaded file. Task> DownloadAsync(string fileName, CancellationToken cancellationToken = default); - + /// /// Downloads a file asynchronously using blob metadata. /// @@ -30,7 +30,7 @@ public interface IDownloader /// The downloaded file is returned as a LocalFile object wrapped in a Result type for error handling. 
/// Task> DownloadAsync(BlobMetadata metadata, CancellationToken cancellationToken = default); - + /// /// Downloads a file asynchronously with the specified download options. /// diff --git a/ManagedCode.Storage.Core/IStorageOperations.cs b/ManagedCode.Storage.Core/IStorageOperations.cs index 176e606..76badfe 100644 --- a/ManagedCode.Storage.Core/IStorageOperations.cs +++ b/ManagedCode.Storage.Core/IStorageOperations.cs @@ -50,7 +50,7 @@ public interface IStorageOperations Task> ExistsAsync(ExistOptions options, CancellationToken cancellationToken = default); - /// + /// /// Asynchronously checks if a file exists with the provided exist options. /// /// An action that configures the exist options. diff --git a/ManagedCode.Storage.Core/IUploader.cs b/ManagedCode.Storage.Core/IUploader.cs index c382bc6..bf5f875 100644 --- a/ManagedCode.Storage.Core/IUploader.cs +++ b/ManagedCode.Storage.Core/IUploader.cs @@ -42,7 +42,7 @@ public interface IUploader /// A task that represents the asynchronous operation. The task result contains the metadata of the uploaded blob. Task> UploadAsync(FileInfo fileInfo, CancellationToken cancellationToken = default); - + /// /// Asynchronously uploads the provided stream data to the storage with the specified upload options. 
/// diff --git a/ManagedCode.Storage.Core/Models/MetadataOptions.cs b/ManagedCode.Storage.Core/Models/MetadataOptions.cs index 383fbd2..47e8ccf 100644 --- a/ManagedCode.Storage.Core/Models/MetadataOptions.cs +++ b/ManagedCode.Storage.Core/Models/MetadataOptions.cs @@ -6,6 +6,6 @@ public static MetadataOptions FromBaseOptions(BaseOptions options) { return new MetadataOptions { FileName = options.FileName, Directory = options.Directory }; } - + public string ETag { get; set; } = string.Empty; } \ No newline at end of file diff --git a/ManagedCode.Storage.Core/Providers/IStorageFactory.cs b/ManagedCode.Storage.Core/Providers/IStorageFactory.cs index 99376e1..70db7b7 100644 --- a/ManagedCode.Storage.Core/Providers/IStorageFactory.cs +++ b/ManagedCode.Storage.Core/Providers/IStorageFactory.cs @@ -6,13 +6,13 @@ public interface IStorageFactory { IStorage CreateStorage(IStorageOptions options); IStorage CreateStorage(Action options); - - TStorage CreateStorage(TOptions options) - where TStorage : class, IStorage + + TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage where TOptions : class, IStorageOptions; - - TStorage CreateStorage(Action options) - where TStorage : class, IStorage + + TStorage CreateStorage(Action options) + where TStorage : class, IStorage where TOptions : class, IStorageOptions; } } diff --git a/ManagedCode.Storage.Core/Providers/IStorageProvider.cs b/ManagedCode.Storage.Core/Providers/IStorageProvider.cs index 48afec0..a128a37 100644 --- a/ManagedCode.Storage.Core/Providers/IStorageProvider.cs +++ b/ManagedCode.Storage.Core/Providers/IStorageProvider.cs @@ -5,11 +5,11 @@ namespace ManagedCode.Storage.Core.Providers public interface IStorageProvider { Type StorageOptionsType { get; } - TStorage CreateStorage(TOptions options) - where TStorage : class, IStorage + TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage where TOptions : class, IStorageOptions; - - + + IStorageOptions GetDefaultOptions(); } } 
diff --git a/ManagedCode.Storage.Core/Providers/StorageFactory.cs b/ManagedCode.Storage.Core/Providers/StorageFactory.cs index 879240e..a1a7982 100644 --- a/ManagedCode.Storage.Core/Providers/StorageFactory.cs +++ b/ManagedCode.Storage.Core/Providers/StorageFactory.cs @@ -10,7 +10,7 @@ public StorageFactory(IEnumerable providers) { Providers = providers.ToDictionary(p => p.StorageOptionsType, p => p); } - + private IStorageProvider? GetProvider(Type optionsType) { return Providers @@ -22,9 +22,9 @@ public StorageFactory(IEnumerable providers) public IStorage CreateStorage(IStorageOptions options) { - var provider = GetProvider(options.GetType()) + var provider = GetProvider(options.GetType()) ?? throw new NotSupportedException($"Provider for {options.GetType()} not found"); - + return provider.CreateStorage(options); } @@ -32,7 +32,7 @@ public IStorage CreateStorage(Action options) { var provider = GetProvider(options.GetType()) ?? throw new NotSupportedException($"Provider for {options.GetType()} not found"); - + var storageOptions = provider.GetDefaultOptions(); options.Invoke(storageOptions); return CreateStorage(storageOptions); @@ -44,7 +44,7 @@ public TStorage CreateStorage(TOptions options) { var provider = GetProvider(typeof(TOptions)) ?? throw new NotSupportedException($"Provider for {typeof(TOptions)} not found"); - + return provider.CreateStorage(options); } @@ -54,7 +54,7 @@ public TStorage CreateStorage(Action options) { var provider = GetProvider(typeof(TOptions)) ?? 
throw new NotSupportedException($"Provider for {typeof(TOptions)} not found"); - + TOptions storageOptions = (TOptions)provider.GetDefaultOptions(); options.Invoke(storageOptions); return provider.CreateStorage(storageOptions); diff --git a/ManagedCode.Storage.Core/Utf8StringStream.cs b/ManagedCode.Storage.Core/Utf8StringStream.cs index 0677f12..b578b4a 100644 --- a/ManagedCode.Storage.Core/Utf8StringStream.cs +++ b/ManagedCode.Storage.Core/Utf8StringStream.cs @@ -23,7 +23,7 @@ public sealed class Utf8StringStream : Stream public Utf8StringStream(string text) { ArgumentNullException.ThrowIfNull(text); - + // Use UTF-8 encoding directly to byte array - most efficient for large strings var byteCount = Encoding.UTF8.GetByteCount(text); var buffer = new byte[byteCount]; @@ -51,11 +51,11 @@ public Utf8StringStream(ReadOnlyMemory utf8Bytes) public static Utf8StringStream CreatePooled(string text, ArrayPool? arrayPool = null) { ArgumentNullException.ThrowIfNull(text); - + arrayPool ??= ArrayPool.Shared; var byteCount = Encoding.UTF8.GetByteCount(text); var rentedArray = arrayPool.Rent(byteCount); - + try { var actualLength = Encoding.UTF8.GetBytes(text, rentedArray); @@ -99,7 +99,7 @@ public override int Read(Span buffer) public override async ValueTask ReadAsync(Memory buffer, CancellationToken cancellationToken = default) { cancellationToken.ThrowIfCancellationRequested(); - + // Since we're reading from memory, this is synchronous but we await for API compliance await Task.CompletedTask; return ReadCore(buffer.Span); @@ -109,14 +109,14 @@ private int ReadCore(Span destination) { var remaining = _buffer.Length - _position; var bytesToRead = Math.Min(destination.Length, remaining); - + if (bytesToRead <= 0) return 0; var source = _buffer.Span.Slice(_position, bytesToRead); source.CopyTo(destination); _position += bytesToRead; - + return bytesToRead; } diff --git a/ManagedCode.Storage.VirtualFileSystem/Extensions/ServiceCollectionExtensions.cs 
b/ManagedCode.Storage.VirtualFileSystem/Extensions/ServiceCollectionExtensions.cs index 142e2e6..b95e1b9 100644 --- a/ManagedCode.Storage.VirtualFileSystem/Extensions/ServiceCollectionExtensions.cs +++ b/ManagedCode.Storage.VirtualFileSystem/Extensions/ServiceCollectionExtensions.cs @@ -45,11 +45,11 @@ public static IServiceCollection AddVirtualFileSystem( // Register core services services.TryAddSingleton(); - + // Register VFS services services.TryAddScoped(); services.TryAddSingleton(); - + // Register metadata manager (this will be overridden by storage-specific registrations) services.TryAddScoped(); @@ -115,7 +115,7 @@ public override async Task SetVfsMetadataAsync( _logger.LogDebug("Setting VFS metadata for: {BlobName}", blobName); var metadataDict = BuildMetadataDictionary(metadata, customMetadata); - + // Use the storage provider's metadata setting capability // Note: This is a simplified implementation. Real implementation would depend on the storage provider try @@ -129,7 +129,7 @@ public override async Task SetVfsMetadataAsync( { existingMetadata[kvp.Key] = kvp.Value; } - + // Note: Most storage providers don't have a direct "set metadata" operation // This would typically require re-uploading the blob with new metadata _logger.LogWarning("Metadata update not fully implemented for this storage provider"); @@ -233,7 +233,7 @@ public async Task MountAsync( { if (string.IsNullOrWhiteSpace(mountPoint)) throw new ArgumentException("Mount point cannot be null or empty", nameof(mountPoint)); - + if (storage == null) throw new ArgumentNullException(nameof(storage)); @@ -248,14 +248,14 @@ public async Task MountAsync( var cache = _serviceProvider.GetRequiredService(); var loggerFactory = _serviceProvider.GetRequiredService(); var metadataManager = new DefaultMetadataManager(storage, loggerFactory.CreateLogger()); - + var vfsOptions = Microsoft.Extensions.Options.Options.Create(options ?? 
new VfsOptions()); var vfsLogger = loggerFactory.CreateLogger(); - + var vfs = new Implementations.VirtualFileSystem(storage, metadataManager, vfsOptions, cache, vfsLogger); _mounts[mountPoint] = vfs; - + _logger.LogInformation("Storage mounted successfully at: {MountPoint}", mountPoint); } @@ -340,12 +340,12 @@ public async ValueTask DisposeAsync() if (!_disposed) { _logger.LogDebug("Disposing VirtualFileSystemManager"); - + foreach (var vfs in _mounts.Values) { await vfs.DisposeAsync(); } - + _mounts.Clear(); _disposed = true; } diff --git a/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualDirectory.cs b/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualDirectory.cs index 020c555..58cc3f7 100644 --- a/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualDirectory.cs +++ b/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualDirectory.cs @@ -24,7 +24,7 @@ public class VirtualDirectory : IVirtualDirectory private readonly IMemoryCache _cache; private readonly ILogger _logger; private readonly VfsPath _path; - + private VfsMetadata? 
_vfsMetadata; private bool _metadataLoaded; @@ -70,7 +70,7 @@ public async ValueTask ExistsAsync(CancellationToken cancellationToken = d public async Task RefreshAsync(CancellationToken cancellationToken = default) { _logger.LogDebug("Refreshing directory metadata: {Path}", _path); - + // For virtual directories, we might not have explicit metadata unless using a directory strategy // that creates marker files if (_vfs.Options.DirectoryStrategy != DirectoryStrategy.Virtual) @@ -78,7 +78,7 @@ public async Task RefreshAsync(CancellationToken cancellationToken = default) var markerKey = GetDirectoryMarkerKey(); _vfsMetadata = await _metadataManager.GetVfsMetadataAsync(markerKey, cancellationToken); } - + _metadataLoaded = true; } @@ -97,7 +97,7 @@ public async IAsyncEnumerable GetFilesAsync( [EnumeratorCancellation] CancellationToken cancellationToken = default) { _logger.LogDebug("Getting files: {Path}, recursive: {Recursive}", _path, recursive); - + await foreach (var entry in GetEntriesInternalAsync(pattern, recursive, pageSize, true, false, cancellationToken)) { if (entry is IVirtualFile file) @@ -115,7 +115,7 @@ public async IAsyncEnumerable GetDirectoriesAsync( [EnumeratorCancellation] CancellationToken cancellationToken = default) { _logger.LogDebug("Getting directories: {Path}, recursive: {Recursive}", _path, recursive); - + await foreach (var entry in GetEntriesInternalAsync(pattern, recursive, pageSize, false, true, cancellationToken)) { if (entry is IVirtualDirectory directory) @@ -133,7 +133,7 @@ public async IAsyncEnumerable GetEntriesAsync( [EnumeratorCancellation] CancellationToken cancellationToken = default) { _logger.LogDebug("Getting entries: {Path}, recursive: {Recursive}", _path, recursive); - + await foreach (var entry in GetEntriesInternalAsync(pattern, recursive, pageSize, true, true, cancellationToken)) { yield return entry; @@ -177,7 +177,7 @@ async ValueTask OnEntryYieldedAsync() prefix += "/"; var directories = new HashSet(); - + await 
foreach (var blob in _vfs.Storage.GetBlobMetadataListAsync(prefix, cancellationToken)) { if (blob is null) @@ -192,7 +192,7 @@ async ValueTask OnEntryYieldedAsync() var relativePath = blob.FullName.Length > prefix.Length ? blob.FullName[prefix.Length..] : blob.FullName; - + if (string.IsNullOrEmpty(relativePath)) continue; @@ -235,12 +235,12 @@ async ValueTask OnEntryYieldedAsync() { var pathParts = relativePath.Split('/'); var currentPath = ""; - + for (int i = 0; i < pathParts.Length - 1; i++) // Exclude the file name itself { if (i > 0) currentPath += "/"; currentPath += pathParts[i]; - + if (directories.Add(currentPath)) { if (pattern == null || pattern.IsMatch(pathParts[i])) @@ -265,12 +265,12 @@ public async ValueTask CreateFileAsync( throw new ArgumentException("File name cannot be null or empty", nameof(name)); options ??= new CreateFileOptions(); - + _logger.LogDebug("Creating file: {Path}/{Name}", _path, name); - + var filePath = _path.Combine(name); var file = await _vfs.GetFileAsync(filePath, cancellationToken); - + if (await file.ExistsAsync(cancellationToken) && !options.Overwrite) { throw new VfsAlreadyExistsException(filePath); @@ -285,7 +285,7 @@ public async ValueTask CreateFileAsync( }; await file.WriteAllBytesAsync(Array.Empty(), writeOptions, cancellationToken); - + return file; } @@ -298,7 +298,7 @@ public async ValueTask CreateDirectoryAsync( throw new ArgumentException("Directory name cannot be null or empty", nameof(name)); _logger.LogDebug("Creating directory: {Path}/{Name}", _path, name); - + var dirPath = _path.Combine(name); var directory = await _vfs.GetDirectoryAsync(dirPath, cancellationToken); @@ -337,7 +337,7 @@ public async Task GetStatsAsync( CancellationToken cancellationToken = default) { _logger.LogDebug("Getting directory stats: {Path}, recursive: {Recursive}", _path, recursive); - + var fileCount = 0; var directoryCount = 0; var totalSize = 0L; @@ -352,23 +352,23 @@ public async Task GetStatsAsync( { fileCount++; totalSize 
+= file.Size; - + var extension = System.IO.Path.GetExtension(file.Name).ToLowerInvariant(); if (string.IsNullOrEmpty(extension)) extension = "(no extension)"; - + filesByExtension[extension] = filesByExtension.GetValueOrDefault(extension, 0) + 1; - + if (largestFile == null || file.Size > largestFile.Size) { largestFile = file; } - + if (oldestModified == null || file.LastModified < oldestModified) { oldestModified = file.LastModified; } - + if (newestModified == null || file.LastModified > newestModified) { newestModified = file.LastModified; diff --git a/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFile.cs b/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFile.cs index a7c8d50..cdaa449 100644 --- a/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFile.cs +++ b/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFile.cs @@ -26,7 +26,7 @@ public class VirtualFile : IVirtualFile private readonly IMemoryCache _cache; private readonly ILogger _logger; private readonly VfsPath _path; - + private BlobMetadata? _blobMetadata; private VfsMetadata? 
_vfsMetadata; private bool _metadataLoaded; @@ -129,11 +129,11 @@ public async Task OpenReadAsync( CancellationToken cancellationToken = default) { options ??= new StreamOptions(); - + _logger.LogDebug("Opening read stream: {Path}", _path); - + await EnsureMetadataLoadedAsync(cancellationToken); - + if (_blobMetadata == null) { throw new VfsNotFoundException(_path); @@ -142,7 +142,7 @@ public async Task OpenReadAsync( try { var result = await _vfs.Storage.GetStreamAsync(_path.ToBlobKey(), cancellationToken); - + if (!result.IsSuccess || result.Value == null) { throw new VfsOperationException($"Failed to open read stream for file: {_path}"); @@ -163,9 +163,9 @@ public async Task OpenWriteAsync( CancellationToken cancellationToken = default) { options ??= new WriteOptions(); - + _logger.LogDebug("Opening write stream: {Path}", _path); - + if (!options.Overwrite && await ExistsAsync(cancellationToken)) { throw new VfsAlreadyExistsException(_path); @@ -196,19 +196,19 @@ public async ValueTask ReadRangeAsync( CancellationToken cancellationToken = default) { _logger.LogDebug("Reading range: {Path}, offset: {Offset}, count: {Count}", _path, offset, count); - + await using var stream = await OpenReadAsync( - new StreamOptions { RangeStart = offset, RangeEnd = offset + count - 1 }, + new StreamOptions { RangeStart = offset, RangeEnd = offset + count - 1 }, cancellationToken); - + var buffer = new byte[count]; var bytesRead = await stream.ReadAsync(buffer, 0, count, cancellationToken); - + if (bytesRead < count) { Array.Resize(ref buffer, bytesRead); } - + return buffer; } @@ -216,7 +216,7 @@ public async ValueTask ReadRangeAsync( public async Task ReadAllBytesAsync(CancellationToken cancellationToken = default) { _logger.LogDebug("Reading all bytes: {Path}", _path); - + await using var stream = await OpenReadAsync(cancellationToken: cancellationToken); using var memoryStream = new MemoryStream(); await stream.CopyToAsync(memoryStream, cancellationToken); @@ -229,9 +229,9 @@ 
public async Task ReadAllTextAsync( CancellationToken cancellationToken = default) { encoding ??= Encoding.UTF8; - + _logger.LogDebug("Reading all text: {Path}", _path); - + var bytes = await ReadAllBytesAsync(cancellationToken); return encoding.GetString(bytes); } @@ -243,7 +243,7 @@ public async Task WriteAllBytesAsync( CancellationToken cancellationToken = default) { _logger.LogDebug("Writing all bytes: {Path}, size: {Size}", _path, bytes.Length); - + await using var stream = await OpenWriteAsync(options, cancellationToken); await stream.WriteAsync(bytes, 0, bytes.Length, cancellationToken); } @@ -256,9 +256,9 @@ public async Task WriteAllTextAsync( CancellationToken cancellationToken = default) { encoding ??= Encoding.UTF8; - + _logger.LogDebug("Writing all text: {Path}, length: {Length}", _path, text.Length); - + var bytes = encoding.GetBytes(text); await WriteAllBytesAsync(bytes, options, cancellationToken); } @@ -268,7 +268,7 @@ public async ValueTask> GetMetadataAsync( CancellationToken cancellationToken = default) { var cacheKey = $"file_custom_metadata:{_vfs.ContainerName}:{_path}"; - + if (_vfs.Options.EnableCache && _cache.TryGetValue(cacheKey, out IReadOnlyDictionary cached)) { _logger.LogDebug("File metadata (cached): {Path}", _path); @@ -276,7 +276,7 @@ public async ValueTask> GetMetadataAsync( } var metadata = await _metadataManager.GetCustomMetadataAsync(_path.ToBlobKey(), cancellationToken); - + if (_vfs.Options.EnableCache) { _cache.Set(cacheKey, metadata, _vfs.Options.CacheTTL); @@ -287,7 +287,7 @@ public async ValueTask> GetMetadataAsync( _cache.Set(metadataKey, entry, _vfs.Options.CacheTTL); } } - + _logger.LogDebug("File metadata: {Path}, count: {Count}", _path, metadata.Count); return metadata; } @@ -299,7 +299,7 @@ public async Task SetMetadataAsync( CancellationToken cancellationToken = default) { _logger.LogDebug("Setting metadata: {Path}, count: {Count}", _path, metadata.Count); - + if (!string.IsNullOrEmpty(expectedETag)) { await 
EnsureMetadataLoadedAsync(cancellationToken); @@ -337,7 +337,7 @@ await _metadataManager.SetVfsMetadataAsync( public async Task StartMultipartUploadAsync(CancellationToken cancellationToken = default) { _logger.LogDebug("Starting multipart upload: {Path}", _path); - + // This is a simplified implementation - real multipart upload would depend on the storage provider throw new VfsNotSupportedException("Multipart upload", "Not yet implemented in this version"); } diff --git a/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFileSystem.cs b/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFileSystem.cs index c245f92..2fbc3d3 100644 --- a/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFileSystem.cs +++ b/ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFileSystem.cs @@ -41,7 +41,7 @@ public VirtualFileSystem( _cache = cache ?? throw new ArgumentNullException(nameof(cache)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); _options = options.Value ?? 
throw new ArgumentNullException("options.Value"); - + ContainerName = _options.DefaultContainer; } @@ -68,9 +68,9 @@ public ValueTask GetFileAsync(VfsPath path, CancellationToken canc public async ValueTask FileExistsAsync(VfsPath path, CancellationToken cancellationToken = default) { ThrowIfDisposed(); - + var cacheKey = $"file_exists:{ContainerName}:{path}"; - + if (_options.EnableCache && _cache.TryGetValue(cacheKey, out bool cached)) { _logger.LogDebug("File exists check (cached): {Path} = {Exists}", path, cached); @@ -81,12 +81,12 @@ public async ValueTask FileExistsAsync(VfsPath path, CancellationToken can { var blobInfo = await _metadataManager.GetBlobInfoAsync(path.ToBlobKey(), cancellationToken); var exists = blobInfo != null; - + if (_options.EnableCache) { _cache.Set(cacheKey, exists, _options.CacheTTL); } - + _logger.LogDebug("File exists check: {Path} = {Exists}", path, exists); return exists; } @@ -101,13 +101,13 @@ public async ValueTask FileExistsAsync(VfsPath path, CancellationToken can public async ValueTask DeleteFileAsync(VfsPath path, CancellationToken cancellationToken = default) { ThrowIfDisposed(); - + _logger.LogDebug("Deleting file: {Path}", path); - + try { var result = await _storage.DeleteAsync(path.ToBlobKey(), cancellationToken); - + if (result.IsSuccess && result.Value) { if (_options.EnableCache) @@ -123,7 +123,7 @@ public async ValueTask DeleteFileAsync(VfsPath path, CancellationToken can _logger.LogDebug("File deleted successfully: {Path}", path); return true; } - + _logger.LogDebug("File delete failed: {Path}", path); return false; } @@ -148,9 +148,9 @@ public ValueTask GetDirectoryAsync(VfsPath path, Cancellation public async ValueTask DirectoryExistsAsync(VfsPath path, CancellationToken cancellationToken = default) { ThrowIfDisposed(); - + var cacheKey = $"dir_exists:{ContainerName}:{path}"; - + if (_options.EnableCache && _cache.TryGetValue(cacheKey, out bool cached)) { _logger.LogDebug("Directory exists check (cached): {Path} 
= {Exists}", path, cached); @@ -170,16 +170,16 @@ public async ValueTask DirectoryExistsAsync(VfsPath path, CancellationToke { _cache.Set(cacheKey, true, _options.CacheTTL); } - + _logger.LogDebug("Directory exists check: {Path} = true", path); return true; } - + if (_options.EnableCache) { _cache.Set(cacheKey, false, _options.CacheTTL); } - + _logger.LogDebug("Directory exists check: {Path} = false", path); return false; } @@ -192,16 +192,16 @@ public async ValueTask DirectoryExistsAsync(VfsPath path, CancellationToke /// public async Task DeleteDirectoryAsync( - VfsPath path, - bool recursive = false, + VfsPath path, + bool recursive = false, CancellationToken cancellationToken = default) { ThrowIfDisposed(); - + _logger.LogDebug("Deleting directory: {Path}, recursive: {Recursive}", path, recursive); - + var result = new DeleteDirectoryResult { Success = true }; - + try { var prefix = path.ToBlobKey(); @@ -209,7 +209,7 @@ public async Task DeleteDirectoryAsync( prefix += "/"; var filesToDelete = new List(); - + await foreach (var blob in _storage.GetBlobMetadataListAsync(prefix, cancellationToken)) { // For non-recursive, only delete direct children @@ -222,10 +222,10 @@ public async Task DeleteDirectoryAsync( continue; } } - + filesToDelete.Add(blob.FullName); } - + // Delete files foreach (var fileName in filesToDelete) { @@ -247,18 +247,18 @@ public async Task DeleteDirectoryAsync( _logger.LogWarning(ex, "Error deleting file: {FileName}", fileName); } } - + // Invalidate cache if (_options.EnableCache) { var cacheKey = $"dir_exists:{ContainerName}:{path}"; _cache.Remove(cacheKey); } - + result.Success = result.Errors.Count == 0; - _logger.LogDebug("Directory delete completed: {Path}, files deleted: {FilesDeleted}, errors: {ErrorCount}", + _logger.LogDebug("Directory delete completed: {Path}, files deleted: {FilesDeleted}, errors: {ErrorCount}", path, result.FilesDeleted, result.Errors.Count); - + return result; } catch (Exception ex) @@ -272,23 +272,23 @@ 
public async Task DeleteDirectoryAsync( /// public async Task MoveAsync( - VfsPath source, - VfsPath destination, - MoveOptions? options = null, + VfsPath source, + VfsPath destination, + MoveOptions? options = null, CancellationToken cancellationToken = default) { ThrowIfDisposed(); options ??= new MoveOptions(); - + _logger.LogDebug("Moving: {Source} -> {Destination}", source, destination); - + // For now, implement as copy + delete - await CopyAsync(source, destination, new CopyOptions - { + await CopyAsync(source, destination, new CopyOptions + { Overwrite = options.Overwrite, PreserveMetadata = options.PreserveMetadata }, null, cancellationToken); - + // Delete source if (await FileExistsAsync(source, cancellationToken)) { @@ -298,23 +298,23 @@ public async Task MoveAsync( { await DeleteDirectoryAsync(source, true, cancellationToken); } - + _logger.LogDebug("Move completed: {Source} -> {Destination}", source, destination); } /// public async Task CopyAsync( - VfsPath source, - VfsPath destination, - CopyOptions? options = null, - IProgress? progress = null, + VfsPath source, + VfsPath destination, + CopyOptions? options = null, + IProgress? progress = null, CancellationToken cancellationToken = default) { ThrowIfDisposed(); options ??= new CopyOptions(); - + _logger.LogDebug("Copying: {Source} -> {Destination}", source, destination); - + // Check if source is a file if (await FileExistsAsync(source, cancellationToken)) { @@ -335,39 +335,39 @@ public async Task CopyAsync( { throw new VfsNotFoundException(source); } - + _logger.LogDebug("Copy completed: {Source} -> {Destination}", source, destination); } private async Task CopyFileAsync( - VfsPath source, - VfsPath destination, - CopyOptions options, - IProgress? progress, + VfsPath source, + VfsPath destination, + CopyOptions options, + IProgress? 
progress, CancellationToken cancellationToken) { var sourceFile = await GetFileAsync(source, cancellationToken); var destinationFile = await GetFileAsync(destination, cancellationToken); - + if (await destinationFile.ExistsAsync(cancellationToken) && !options.Overwrite) { throw new VfsAlreadyExistsException(destination); } - - progress?.Report(new CopyProgress - { - TotalFiles = 1, + + progress?.Report(new CopyProgress + { + TotalFiles = 1, TotalBytes = sourceFile.Size, CurrentFile = source }); - + // Copy content await using var sourceStream = await sourceFile.OpenReadAsync(cancellationToken: cancellationToken); await using var destinationStream = await destinationFile.OpenWriteAsync( new WriteOptions { Overwrite = options.Overwrite }, cancellationToken); - + await sourceStream.CopyToAsync(destinationStream, cancellationToken); - + // Copy metadata if requested if (options.PreserveMetadata) { @@ -378,10 +378,10 @@ private async Task CopyFileAsync( await destinationFile.SetMetadataAsync(metadataDict, cancellationToken: cancellationToken); } } - - progress?.Report(new CopyProgress - { - TotalFiles = 1, + + progress?.Report(new CopyProgress + { + TotalFiles = 1, CopiedFiles = 1, TotalBytes = sourceFile.Size, CopiedBytes = sourceFile.Size, @@ -390,18 +390,18 @@ private async Task CopyFileAsync( } private async Task CopyDirectoryAsync( - VfsPath source, - VfsPath destination, - CopyOptions options, - IProgress? progress, + VfsPath source, + VfsPath destination, + CopyOptions options, + IProgress? 
progress, CancellationToken cancellationToken) { var sourceDir = await GetDirectoryAsync(source, cancellationToken); - + // Calculate total work for progress reporting var totalFiles = 0; var totalBytes = 0L; - + await foreach (var entry in sourceDir.GetEntriesAsync(recursive: true, cancellationToken: cancellationToken)) { if (entry.Type == VfsEntryType.File && entry is IVirtualFile file) @@ -410,10 +410,10 @@ private async Task CopyDirectoryAsync( totalBytes += file.Size; } } - + var copiedFiles = 0; var copiedBytes = 0L; - + await foreach (var entry in sourceDir.GetEntriesAsync(recursive: true, cancellationToken: cancellationToken)) { if (entry.Type == VfsEntryType.File && entry is IVirtualFile sourceFile) @@ -421,28 +421,28 @@ private async Task CopyDirectoryAsync( var relativePath = entry.Path.Value[source.Value.Length..].TrimStart('/'); var destPath = destination.Combine(relativePath); var destFile = await GetFileAsync(destPath, cancellationToken); - + if (await destFile.ExistsAsync(cancellationToken) && !options.Overwrite) { continue; // Skip existing files } - - progress?.Report(new CopyProgress - { + + progress?.Report(new CopyProgress + { TotalFiles = totalFiles, CopiedFiles = copiedFiles, TotalBytes = totalBytes, CopiedBytes = copiedBytes, CurrentFile = entry.Path }); - + // Copy file content await using var sourceStream = await sourceFile.OpenReadAsync(cancellationToken: cancellationToken); await using var destStream = await destFile.OpenWriteAsync( new WriteOptions { Overwrite = options.Overwrite }, cancellationToken); - + await sourceStream.CopyToAsync(destStream, cancellationToken); - + // Copy metadata if requested if (options.PreserveMetadata) { @@ -453,14 +453,14 @@ private async Task CopyDirectoryAsync( await destFile.SetMetadataAsync(metadataDict, cancellationToken: cancellationToken); } } - + copiedFiles++; copiedBytes += sourceFile.Size; } } - - progress?.Report(new CopyProgress - { + + progress?.Report(new CopyProgress + { TotalFiles = 
totalFiles, CopiedFiles = copiedFiles, TotalBytes = totalBytes, @@ -472,44 +472,44 @@ private async Task CopyDirectoryAsync( public async ValueTask GetEntryAsync(VfsPath path, CancellationToken cancellationToken = default) { ThrowIfDisposed(); - + if (await FileExistsAsync(path, cancellationToken)) { return await GetFileAsync(path, cancellationToken); } - + if (await DirectoryExistsAsync(path, cancellationToken)) { return await GetDirectoryAsync(path, cancellationToken); } - + return null; } /// public async IAsyncEnumerable ListAsync( - VfsPath path, - ListOptions? options = null, + VfsPath path, + ListOptions? options = null, [EnumeratorCancellation] CancellationToken cancellationToken = default) { ThrowIfDisposed(); options ??= new ListOptions(); - + var directory = await GetDirectoryAsync(path, cancellationToken); var pageSize = options.PageSize > 0 ? options.PageSize : _options.DefaultPageSize; await foreach (var entry in directory.GetEntriesAsync( - options.Pattern, - options.Recursive, - pageSize, + options.Pattern, + options.Recursive, + pageSize, cancellationToken)) { if (entry.Type == VfsEntryType.File && !options.IncludeFiles) continue; - + if (entry.Type == VfsEntryType.Directory && !options.IncludeDirectories) continue; - + yield return entry; } } diff --git a/ManagedCode.Storage.VirtualFileSystem/Options/VfsOptions.cs b/ManagedCode.Storage.VirtualFileSystem/Options/VfsOptions.cs index 74d4f78..96ef33f 100644 --- a/ManagedCode.Storage.VirtualFileSystem/Options/VfsOptions.cs +++ b/ManagedCode.Storage.VirtualFileSystem/Options/VfsOptions.cs @@ -258,7 +258,7 @@ private static bool IsWildcardMatch(string pattern, string input, StringComparis while (inputIndex < input.Length) { - if (patternIndex < pattern.Length && (pattern[patternIndex] == '?' || + if (patternIndex < pattern.Length && (pattern[patternIndex] == '?' 
|| string.Equals(pattern[patternIndex].ToString(), input[inputIndex].ToString(), comparison))) { patternIndex++; diff --git a/ManagedCode.Storage.VirtualFileSystem/Streaming/VfsWriteStream.cs b/ManagedCode.Storage.VirtualFileSystem/Streaming/VfsWriteStream.cs index 8f6ac96..b5fba8f 100644 --- a/ManagedCode.Storage.VirtualFileSystem/Streaming/VfsWriteStream.cs +++ b/ManagedCode.Storage.VirtualFileSystem/Streaming/VfsWriteStream.cs @@ -39,7 +39,7 @@ public VfsWriteStream( _cache = cache ?? throw new ArgumentNullException(nameof(cache)); _vfsOptions = vfsOptions ?? throw new ArgumentNullException(nameof(vfsOptions)); _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - + _buffer = new MemoryStream(); } @@ -156,7 +156,7 @@ private async Task UploadBufferedDataAsync() try { _buffer.Position = 0; - + var uploadOptions = new UploadOptions(_blobKey) { MimeType = _options.ContentType, @@ -164,7 +164,7 @@ private async Task UploadBufferedDataAsync() }; var result = await _storage.UploadAsync(_buffer, uploadOptions); - + if (!result.IsSuccess) { throw new VfsOperationException($"Failed to upload data for: {_blobKey}. 
Error: {result.Problem}"); diff --git a/README.md b/README.md index efe746c..f0b0709 100644 --- a/README.md +++ b/README.md @@ -1,19 +1,95 @@ -![img|300x200](https://raw.githubusercontent.com/managedcode/Storage/main/logo.png) +![ManagedCode.Storage logo](https://raw.githubusercontent.com/managedcode/Storage/main/logo.png) # ManagedCode.Storage [![CI](https://github.com/managedcode/Storage/actions/workflows/ci.yml/badge.svg?branch=main)](https://github.com/managedcode/Storage/actions/workflows/ci.yml) +[![Docs](https://github.com/managedcode/Storage/actions/workflows/jekyll-gh-pages.yml/badge.svg?branch=main)](https://github.com/managedcode/Storage/actions/workflows/jekyll-gh-pages.yml) [![Release](https://github.com/managedcode/Storage/actions/workflows/release.yml/badge.svg?branch=main)](https://github.com/managedcode/Storage/actions/workflows/release.yml) [![CodeQL](https://github.com/managedcode/Storage/actions/workflows/codeql-analysis.yml/badge.svg?branch=main)](https://github.com/managedcode/Storage/actions/workflows/codeql-analysis.yml) [![Codecov](https://codecov.io/gh/managedcode/Storage/graph/badge.svg?token=OMKP91GPVD)](https://codecov.io/gh/managedcode/Storage) [![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=managedcode_Storage&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=managedcode_Storage) [![Coverage](https://sonarcloud.io/api/project_badges/measure?project=managedcode_Storage&metric=coverage)](https://sonarcloud.io/summary/new_code?id=managedcode_Storage) +[![MCAF](https://img.shields.io/badge/MCAF-enabled-785D8F)](https://mcaf.managed-code.com/) +[![.NET](https://img.shields.io/badge/.NET-10.0-512BD4?logo=dotnet)](https://dotnet.microsoft.com/) [![License: MIT](https://img.shields.io/badge/License-MIT-blue.svg)](LICENSE) [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.Core.svg)](https://www.nuget.org/packages/ManagedCode.Storage.Core) Cross-provider blob storage toolkit for .NET and 
ASP.NET streaming scenarios. -ManagedCode.Storage wraps vendor SDKs behind a single `IStorage` abstraction so uploads, downloads, metadata, streaming, and retention behave the same regardless of provider. Swap between Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, OneDrive, Google Drive, Dropbox, CloudKit (iCloud app data), SFTP, a local file system, or the in-memory Virtual File System without rewriting application code. Pair it with our ASP.NET controllers and SignalR client to deliver chunked uploads, ranged downloads, and progress notifications end to end. +## Documentation + +- Published docs (GitHub Pages): https://managedcode.github.io/Storage/ +- Source docs live in `docs/`: + - Setup: `docs/Development/setup.md` + - Credentials (OneDrive/Google Drive/Dropbox/CloudKit): `docs/Development/credentials.md` + - Testing strategy: `docs/Testing/strategy.md` + - Feature docs: `docs/Features/index.md` + - ADRs: `docs/ADR/index.md` + - API (HTTP + SignalR): `docs/API/storage-server.md` +- Diagrams are Mermaid-based and are expected to render on GitHub and the docs site. 
+ +## Table of Contents + +- [Motivation](#motivation) +- [Features](#features) +- [Packages](#packages) +- [Architecture](#architecture) +- [Virtual File System (VFS)](#virtual-file-system-vfs) +- [Dependency Injection & Keyed Registrations](#dependency-injection--keyed-registrations) +- [ASP.NET Controllers & Streaming](#aspnet-controllers--streaming) +- [Connection modes](#connection-modes) +- [How to use](#how-to-use) + +## Quickstart + +### 1) Install a provider package + +```bash +dotnet add package ManagedCode.Storage.FileSystem +``` + +### 2) Register as default `IStorage` + +```csharp +using ManagedCode.Storage.Core; +using ManagedCode.Storage.FileSystem.Extensions; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddFileSystemStorageAsDefault(options => +{ + options.BaseFolder = Path.Combine(builder.Environment.ContentRootPath, "storage"); +}); +``` + +### 3) Use `IStorage` + +```csharp +using ManagedCode.Storage.Core; + +public sealed class MyService(IStorage storage) +{ + public Task UploadAsync(CancellationToken ct) => + storage.UploadAsync("hello", options => options.FileName = "hello.txt", ct); +} +``` + +### 4) (Optional) Expose HTTP + SignalR endpoints + +```csharp +using ManagedCode.Storage.Server.Extensions.DependencyInjection; +using ManagedCode.Storage.Server.Extensions; + +builder.Services.AddControllers(); +builder.Services.AddStorageServer(); +builder.Services.AddStorageSignalR(); // optional + +var app = builder.Build(); +app.MapControllers(); // /api/storage/* +app.MapStorageHub(); // /hubs/storage +``` + +ManagedCode.Storage wraps vendor SDKs behind a single `IStorage` abstraction so uploads, downloads, metadata, streaming, and retention behave the same regardless of provider. 
Swap between Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, OneDrive, Google Drive, Dropbox, CloudKit (iCloud app data), SFTP, and a local file system without rewriting application code — and optionally use the Virtual File System (VFS) overlay for a file/directory API on top of any configured `IStorage`. Pair it with our ASP.NET controllers and SignalR client to deliver chunked uploads, ranged downloads, and progress notifications end to end. ## Motivation @@ -22,12 +98,12 @@ Cloud storage vendors expose distinct SDKs, option models, and authentication pa ## Features - Unified `IStorage` abstraction covering upload, download, streaming, metadata, deletion, container management, and legal hold operations backed by `Result` responses. -- Provider coverage across Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, OneDrive (Microsoft Graph), Google Drive, Dropbox, CloudKit (iCloud app data), SFTP, local file system, and the in-memory Virtual File System (VFS). +- Provider coverage across Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, OneDrive (Microsoft Graph), Google Drive, Dropbox, CloudKit (iCloud app data), SFTP, and the local file system. - Keyed dependency-injection registrations plus default provider helpers to fan out files per tenant, region, or workload without manual service plumbing. - ASP.NET storage controllers, chunk orchestration services, and a SignalR hub/client pair that deliver resumable uploads, ranged downloads, CRC32 validation, and real-time progress. - `ManagedCode.Storage.Client` brings streaming uploads/downloads, CRC32 helpers, and MIME discovery via `MimeHelper` to any .NET app. - Strongly typed option objects (`UploadOptions`, `DownloadOptions`, `DeleteOptions`, `MetadataOptions`, `LegalHoldOptions`, etc.) let you configure directories, metadata, and legal holds in one place. 
-- Virtual File System package keeps everything in memory for lightning-fast tests, developer sandboxes, and local demos while still exercising the same abstractions. +- Virtual File System package provides a file/directory API (`IVirtualFileSystem`) on top of the configured `IStorage` and can cache metadata for faster repeated operations. - Comprehensive automated test suite with cross-provider sync fixtures, multi-gigabyte streaming simulations (4 MB units per "GB"), ASP.NET controller harnesses, and SFTP/local filesystem coverage. - ManagedCode.Storage.TestFakes package plus Testcontainers-based fixtures make it easy to run offline or CI tests without touching real cloud accounts. @@ -38,7 +114,7 @@ Cloud storage vendors expose distinct SDKs, option models, and authentication pa | Package | Latest | Description | | --- | --- | --- | | [ManagedCode.Storage.Core](https://www.nuget.org/packages/ManagedCode.Storage.Core) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.Core.svg)](https://www.nuget.org/packages/ManagedCode.Storage.Core) | Core abstractions, option models, CRC32/MIME helpers, and DI extensions. | -| [ManagedCode.Storage.VirtualFileSystem](https://www.nuget.org/packages/ManagedCode.Storage.VirtualFileSystem) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.VirtualFileSystem.svg)](https://www.nuget.org/packages/ManagedCode.Storage.VirtualFileSystem) | In-memory storage built on the `IStorage` surface for tests and sandboxes. | +| [ManagedCode.Storage.VirtualFileSystem](https://www.nuget.org/packages/ManagedCode.Storage.VirtualFileSystem) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.VirtualFileSystem.svg)](https://www.nuget.org/packages/ManagedCode.Storage.VirtualFileSystem) | Virtual file system overlay on top of `IStorage` (file/directory API + caching; not a provider). 
| | [ManagedCode.Storage.TestFakes](https://www.nuget.org/packages/ManagedCode.Storage.TestFakes) | [![NuGet](https://img.shields.io/nuget/v/ManagedCode.Storage.TestFakes.svg)](https://www.nuget.org/packages/ManagedCode.Storage.TestFakes) | Provider doubles for unit/integration tests without hitting cloud services. | ### Providers @@ -60,6 +136,8 @@ Cloud storage vendors expose distinct SDKs, option models, and authentication pa > iCloud Drive does not expose a public server-side file API. `ManagedCode.Storage.CloudKit` targets CloudKit Web Services (iCloud app data), not iCloud Drive. +Credential guide: `docs/Development/credentials.md`. + These providers follow the same DI patterns as the other backends: use `Add*StorageAsDefault(...)` to bind `IStorage`, or `Add*Storage(...)` to inject the provider interface (`IOneDriveStorage`, `IGoogleDriveStorage`, `IDropboxStorage`, `ICloudKitStorage`). Most cloud-drive providers expect you to create the official SDK client (Graph/Drive/Dropbox) with your preferred auth flow and pass it into the storage options. ManagedCode.Storage does not run OAuth flows automatically. 
@@ -67,16 +145,12 @@ Most cloud-drive providers expect you to create the official SDK client (Graph/D Keyed registrations are available as well (useful for multi-tenant apps): ```csharp -using Dropbox.Api; using ManagedCode.Storage.Core; using ManagedCode.Storage.Dropbox.Extensions; -var accessToken = configuration["Dropbox:AccessToken"]; // obtained via OAuth (see Dropbox section below) -var dropboxClient = new DropboxClient(accessToken); - builder.Services.AddDropboxStorageAsDefault("tenant-a", options => { - options.DropboxClient = dropboxClient; + options.AccessToken = configuration["Dropbox:AccessToken"]; // obtained via OAuth (see Dropbox section below) options.RootPath = "/apps/my-app"; }); @@ -383,30 +457,38 @@ Controllers remain thin: consumers can inherit and override actions to add custo ## Virtual File System (VFS) -Need to hydrate storage dependencies without touching disk or the cloud? The ManagedCode.Storage.VirtualFileSystem package keeps everything in memory and makes it trivial to stand up repeatable tests or developer sandboxes: +Want a file/directory API on top of any configured `IStorage` (with optional metadata caching)? The ManagedCode.Storage.VirtualFileSystem package provides `IVirtualFileSystem`, which routes all operations through your registered storage provider. 
```csharp -// Program.cs / Startup.cs -builder.Services.AddVirtualFileSystemStorageAsDefault(options => +using ManagedCode.Storage.FileSystem.Extensions; +using ManagedCode.Storage.VirtualFileSystem.Core; +using ManagedCode.Storage.VirtualFileSystem.Extensions; + +// 1) Register any IStorage provider (example: FileSystem) +builder.Services.AddFileSystemStorageAsDefault(options => { - options.StorageName = "vfs"; // optional logical name + options.BaseFolder = Path.Combine(builder.Environment.ContentRootPath, "storage"); }); -// Usage -public class MyService +// 2) Add VFS overlay +builder.Services.AddVirtualFileSystem(options => { - private readonly IStorage storage; - - public MyService(IStorage storage) => this.storage = storage; + options.DefaultContainer = "vfs"; + options.EnableCache = true; +}); - public Task UploadAsync(Stream stream, string path) => storage.UploadAsync(stream, new UploadOptions(path)); +// 3) Use IVirtualFileSystem +public sealed class MyVfsService(IVirtualFileSystem vfs) +{ + public async Task WriteAsync(CancellationToken ct) + { + var file = await vfs.GetFileAsync("avatars/user-1.png", ct); + await file.WriteAllTextAsync("hello", cancellationToken: ct); + } } - -// In tests you can pre-populate the VFS -await storage.UploadAsync(new FileInfo("fixtures/avatar.png"), new UploadOptions("avatars/user-1.png")); ``` -Because the VFS implements the same abstractions as every other provider, you can swap it for in-memory integration tests while hitting Azure, S3, etc. in production. +VFS is an overlay: it does not replace your provider. In tests, pair VFS with `ManagedCode.Storage.TestFakes` or the FileSystem provider pointed at a temp folder to avoid real cloud accounts. 
## Dependency Injection & Keyed Registrations @@ -505,7 +587,7 @@ builder.Services.AddStorageServer(options => options.InMemoryUploadThresholdBytes = 512 * 1024; // spill to disk after 512 KB }); -app.MapControllers(); // exposes /storage endpoints +app.MapControllers(); // exposes /api/storage/* endpoints by default ``` When you need custom routes, validation, or policies, inherit from the base controller and reuse the same streaming helpers: @@ -536,8 +618,10 @@ Need resumable uploads or live progress UI? Call AddStorageSignalR() Need parallel S3 buckets? Register them with AddAWSStorage("aws-backup", ...) and inject via [FromKeyedServices("aws-backup")]. +> Need parallel GCS buckets? Register them with AddGCPStorage("gcp-secondary", ...) and inject via [FromKeyedServices("gcp-secondary")]. @@ -675,12 +759,14 @@ Default mode connection: ```cs // Startup.cs -//aws libarary overwrites property values. you should only create configurations this way. -var awsConfig = new AmazonS3Config(); -awsConfig.RegionEndpoint = RegionEndpoint.EUWest1; -awsConfig.ForcePathStyle = true; -awsConfig.UseHttp = true; -awsConfig.ServiceURL = "http://localhost:4566"; //this is the default port for the aws s3 emulator, must be last in the list +// Tip for LocalStack: configure the client and set ServiceURL to the emulator endpoint. 
+var awsConfig = new AmazonS3Config +{ + RegionEndpoint = RegionEndpoint.EUWest1, + ForcePathStyle = true, + UseHttp = true, + ServiceURL = "http://localhost:4566" // LocalStack default endpoint +}; services.AddAWSStorageAsDefault(opt => { diff --git a/Storages/ManagedCode.Storage.Aws/AWSStorage.cs b/Storages/ManagedCode.Storage.Aws/AWSStorage.cs index f1c6420..07c4144 100644 --- a/Storages/ManagedCode.Storage.Aws/AWSStorage.cs +++ b/Storages/ManagedCode.Storage.Aws/AWSStorage.cs @@ -56,17 +56,17 @@ public override async IAsyncEnumerable GetBlobMetadataListAsync(st { var objectsResponse = await StorageClient.ListObjectsAsync(objectsRequest, cancellationToken); - if(cancellationToken.IsCancellationRequested) + if (cancellationToken.IsCancellationRequested) yield break; - + if (objectsResponse?.S3Objects == null) yield break; - + foreach (var entry in objectsResponse.S3Objects) { - if(cancellationToken.IsCancellationRequested) + if (cancellationToken.IsCancellationRequested) yield break; - + var objectMetaRequest = new GetObjectMetadataRequest { BucketName = StorageOptions.Bucket, @@ -133,7 +133,7 @@ protected override async Task CreateContainerInternalAsync(CancellationT await StorageClient.EnsureBucketExistsAsync(StorageOptions.Bucket); cancellationToken.ThrowIfCancellationRequested(); - + return Result.Succeed(); } catch (Exception ex) @@ -156,7 +156,7 @@ protected override async Task DeleteDirectoryInternalAsync(string direct await StorageClient.DeleteAsync(StorageOptions.Bucket, item.Name, null, cancellationToken); cancellationToken.ThrowIfCancellationRequested(); } - + return Result.Succeed(); } catch (Exception ex) @@ -219,7 +219,7 @@ protected override async Task> DownloadInternalAsync(LocalFile await localFile.CopyFromStreamAsync(await StorageClient.GetObjectStreamAsync(StorageOptions.Bucket, options.FullPath, null, cancellationToken), cancellationToken); - + cancellationToken.ThrowIfCancellationRequested(); return Result.Succeed(localFile); } @@ -248,7 
+248,7 @@ await StorageClient.DeleteObjectAsync(new DeleteObjectRequest }, cancellationToken); cancellationToken.ThrowIfCancellationRequested(); - + return Result.Succeed(true); } catch (Exception ex) @@ -323,7 +323,7 @@ protected override async Task SetLegalHoldInternalAsync(bool hasLegalHol { await EnsureContainerExist(cancellationToken); cancellationToken.ThrowIfCancellationRequested(); - + var status = hasLegalHold ? ObjectLockLegalHoldStatus.On : ObjectLockLegalHoldStatus.Off; PutObjectLegalHoldRequest request = new() @@ -338,7 +338,7 @@ protected override async Task SetLegalHoldInternalAsync(bool hasLegalHol await StorageClient.PutObjectLegalHoldAsync(request, cancellationToken); cancellationToken.ThrowIfCancellationRequested(); - + return Result.Succeed(); } catch (Exception ex) diff --git a/Storages/ManagedCode.Storage.Aws/AWSStorageProvider.cs b/Storages/ManagedCode.Storage.Aws/AWSStorageProvider.cs index a4fc90e..9965e57 100644 --- a/Storages/ManagedCode.Storage.Aws/AWSStorageProvider.cs +++ b/Storages/ManagedCode.Storage.Aws/AWSStorageProvider.cs @@ -11,9 +11,9 @@ namespace ManagedCode.Storage.Aws public class AWSStorageProvider(IServiceProvider serviceProvider, AWSStorageOptions defaultOptions) : IStorageProvider { public Type StorageOptionsType => typeof(AWSStorageOptions); - - public TStorage CreateStorage(TOptions options) - where TStorage : class, IStorage + + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage where TOptions : class, IStorageOptions { if (options is not AWSStorageOptions azureOptions) @@ -24,7 +24,7 @@ public TStorage CreateStorage(TOptions options) var logger = serviceProvider.GetService>(); var storage = new AWSStorage(azureOptions, logger); - return storage as TStorage + return storage as TStorage ?? 
throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); } diff --git a/Storages/ManagedCode.Storage.Aws/BlobStream.cs b/Storages/ManagedCode.Storage.Aws/BlobStream.cs index be7cd4c..5a908e6 100644 --- a/Storages/ManagedCode.Storage.Aws/BlobStream.cs +++ b/Storages/ManagedCode.Storage.Aws/BlobStream.cs @@ -99,10 +99,10 @@ private void Flush(bool disposing) if (_metadata.UploadId == null) _metadata.UploadId = _s3.InitiateMultipartUploadAsync(new InitiateMultipartUploadRequest - { - BucketName = _metadata.BucketName, - Key = _metadata.Key - }) + { + BucketName = _metadata.BucketName, + Key = _metadata.Key + }) .GetAwaiter() .GetResult() .UploadId; @@ -139,14 +139,14 @@ private void CompleteUpload() if (Length > 0) _s3.CompleteMultipartUploadAsync(new CompleteMultipartUploadRequest - { - BucketName = _metadata.BucketName, - Key = _metadata.Key, - PartETags = _metadata.PartETags + { + BucketName = _metadata.BucketName, + Key = _metadata.Key, + PartETags = _metadata.PartETags .Select(e => new PartETag(e.Key, e.Value)) .ToList(), - UploadId = _metadata.UploadId - }) + UploadId = _metadata.UploadId + }) .GetAwaiter() .GetResult(); } diff --git a/Storages/ManagedCode.Storage.Aws/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.Aws/Extensions/ServiceCollectionExtensions.cs index 2c412be..62b63d2 100644 --- a/Storages/ManagedCode.Storage.Aws/Extensions/ServiceCollectionExtensions.cs +++ b/Storages/ManagedCode.Storage.Aws/Extensions/ServiceCollectionExtensions.cs @@ -52,7 +52,7 @@ public static IServiceCollection AddAWSStorage(this IServiceCollection serviceCo var options = new AWSStorageOptions(); action.Invoke(options); CheckConfiguration(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { @@ -68,7 +68,7 @@ public static IServiceCollection AddAWSStorageAsDefault(this IServiceCollection var options = new AWSStorageOptions(); action.Invoke(options); 
CheckConfiguration(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { diff --git a/Storages/ManagedCode.Storage.Aws/Extensions/StorageFactoryExtensions.cs b/Storages/ManagedCode.Storage.Aws/Extensions/StorageFactoryExtensions.cs index 46fdb9b..32f4862 100644 --- a/Storages/ManagedCode.Storage.Aws/Extensions/StorageFactoryExtensions.cs +++ b/Storages/ManagedCode.Storage.Aws/Extensions/StorageFactoryExtensions.cs @@ -8,17 +8,17 @@ public static class StorageFactoryExtensions { public static IAWSStorage CreateAWSStorage(this IStorageFactory factory, string bucketName) { - return factory.CreateStorage(options => options.Bucket = bucketName); + return factory.CreateStorage(options => options.Bucket = bucketName); } - + public static IAWSStorage CreateAWSStorage(this IStorageFactory factory, AWSStorageOptions options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } - - + + public static IAWSStorage CreateAWSStorage(this IStorageFactory factory, Action options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } } \ No newline at end of file diff --git a/Storages/ManagedCode.Storage.Aws/Options/AWSStorageOptions.cs b/Storages/ManagedCode.Storage.Aws/Options/AWSStorageOptions.cs index 9fc8771..f1b2499 100644 --- a/Storages/ManagedCode.Storage.Aws/Options/AWSStorageOptions.cs +++ b/Storages/ManagedCode.Storage.Aws/Options/AWSStorageOptions.cs @@ -41,7 +41,7 @@ public class AWSStorageOptions : IStorageOptions /// Whether to create the container if it does not exist. Default is true. /// public bool CreateContainerIfNotExists { get; set; } = true; - + /// /// Whether to use the instance profile credentials. Default is false. 
/// diff --git a/Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorage.cs b/Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorage.cs index b97164b..c15c38c 100644 --- a/Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorage.cs +++ b/Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorage.cs @@ -87,8 +87,8 @@ public override async IAsyncEnumerable GetBlobMetadataListAsync(st Name = item.Name }; } - - if(cancellationToken.IsCancellationRequested) + + if (cancellationToken.IsCancellationRequested) yield break; } } @@ -197,10 +197,10 @@ protected override async Task> DownloadInternalAsync(LocalFile await fileStream.WriteAsync(buffer, 0, count, cancellationToken); await fileStream.FlushAsync(cancellationToken); - + fileStream.Close(); cancellationToken.ThrowIfCancellationRequested(); - + return Result.Succeed(localFile); } catch (Exception ex) @@ -274,7 +274,7 @@ protected override async Task> GetBlobMetadataInternalAsync protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default) { try - { + { await StorageClient.DeleteDirectoryAsync(directory, cancellationToken: cancellationToken); cancellationToken.ThrowIfCancellationRequested(); return Result.Succeed(); diff --git a/Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorageProvider.cs b/Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorageProvider.cs index e0ba0c3..a180b78 100644 --- a/Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorageProvider.cs +++ b/Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorageProvider.cs @@ -11,9 +11,9 @@ namespace ManagedCode.Storage.Azure.DataLake public class AzureDataLakeStorageProvider(IServiceProvider serviceProvider, AzureDataLakeStorageOptions defaultOptions) : IStorageProvider { public Type StorageOptionsType => typeof(AzureDataLakeStorageOptions); - - public TStorage CreateStorage(TOptions options) - where TStorage : class, IStorage 
+ + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage where TOptions : class, IStorageOptions { if (options is not AzureDataLakeStorageOptions azureOptions) @@ -24,7 +24,7 @@ public TStorage CreateStorage(TOptions options) var logger = serviceProvider.GetService>(); var storage = new AzureDataLakeStorage(azureOptions, logger); - return storage as TStorage + return storage as TStorage ?? throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); } diff --git a/Storages/ManagedCode.Storage.Azure.DataLake/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.Azure.DataLake/Extensions/ServiceCollectionExtensions.cs index 1ed9fe9..38d8840 100644 --- a/Storages/ManagedCode.Storage.Azure.DataLake/Extensions/ServiceCollectionExtensions.cs +++ b/Storages/ManagedCode.Storage.Azure.DataLake/Extensions/ServiceCollectionExtensions.cs @@ -47,13 +47,13 @@ public static IServiceCollection AddAzureDataLakeStorageAsDefault(this IServiceC serviceCollection.AddSingleton(); return serviceCollection.AddSingleton(); } - + public static IServiceCollection AddAzureDataLakeStorage(this IServiceCollection serviceCollection, string key, Action action) { var options = new AzureDataLakeStorageOptions(); action.Invoke(options); CheckConfiguration(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { @@ -69,7 +69,7 @@ public static IServiceCollection AddAzureDataLakeStorageAsDefault(this IServiceC var options = new AzureDataLakeStorageOptions(); action.Invoke(options); CheckConfiguration(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { diff --git a/Storages/ManagedCode.Storage.Azure.DataLake/Extensions/StorageFactoryExtensions.cs b/Storages/ManagedCode.Storage.Azure.DataLake/Extensions/StorageFactoryExtensions.cs index 113e462..46d7a7c 100644 --- 
a/Storages/ManagedCode.Storage.Azure.DataLake/Extensions/StorageFactoryExtensions.cs +++ b/Storages/ManagedCode.Storage.Azure.DataLake/Extensions/StorageFactoryExtensions.cs @@ -8,17 +8,17 @@ public static class StorageFactoryExtensions { public static IAzureDataLakeStorage CreateAzureDataLakeStorage(this IStorageFactory factory, string fileSystemName) { - return factory.CreateStorage(options => options.FileSystem = fileSystemName); + return factory.CreateStorage(options => options.FileSystem = fileSystemName); } - + public static IAzureDataLakeStorage CreateAzureDataLakeStorage(this IStorageFactory factory, AzureDataLakeStorageOptions options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } - - + + public static IAzureDataLakeStorage CreateAzureDataLakeStorage(this IStorageFactory factory, Action options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } } \ No newline at end of file diff --git a/Storages/ManagedCode.Storage.Azure/AzureStorage.cs b/Storages/ManagedCode.Storage.Azure/AzureStorage.cs index 4f1a006..575c21e 100644 --- a/Storages/ManagedCode.Storage.Azure/AzureStorage.cs +++ b/Storages/ManagedCode.Storage.Azure/AzureStorage.cs @@ -53,14 +53,14 @@ public override async IAsyncEnumerable GetBlobMetadataListAsync(st .AsPages() .WithCancellation(cancellationToken)) { - if(cancellationToken.IsCancellationRequested) + if (cancellationToken.IsCancellationRequested) yield break; - + foreach (var blobItem in item.Values) { - if(cancellationToken.IsCancellationRequested) + if (cancellationToken.IsCancellationRequested) yield break; - + var blobMetadata = new BlobMetadata { FullName = blobItem.Name, @@ -186,9 +186,9 @@ protected override async Task CreateContainerInternalAsync(CancellationT logger.LogException(e); } } - + cancellationToken.ThrowIfCancellationRequested(); - + return Result.Succeed(); } catch (Exception ex) diff --git 
a/Storages/ManagedCode.Storage.Azure/AzureStorageProvider.cs b/Storages/ManagedCode.Storage.Azure/AzureStorageProvider.cs index 6431db5..cdfde85 100644 --- a/Storages/ManagedCode.Storage.Azure/AzureStorageProvider.cs +++ b/Storages/ManagedCode.Storage.Azure/AzureStorageProvider.cs @@ -11,9 +11,9 @@ namespace ManagedCode.Storage.Azure public class AzureStorageProvider(IServiceProvider serviceProvider, IAzureStorageOptions defaultOptions) : IStorageProvider { public Type StorageOptionsType => typeof(IAzureStorageOptions); - - public TStorage CreateStorage(TOptions options) - where TStorage : class, IStorage + + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage where TOptions : class, IStorageOptions { if (options is not IAzureStorageOptions azureOptions) @@ -24,7 +24,7 @@ public TStorage CreateStorage(TOptions options) var logger = serviceProvider.GetService>(); var storage = new AzureStorage(azureOptions, logger); - return storage as TStorage + return storage as TStorage ?? 
throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); } diff --git a/Storages/ManagedCode.Storage.Azure/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.Azure/Extensions/ServiceCollectionExtensions.cs index 72d4316..d111e30 100644 --- a/Storages/ManagedCode.Storage.Azure/Extensions/ServiceCollectionExtensions.cs +++ b/Storages/ManagedCode.Storage.Azure/Extensions/ServiceCollectionExtensions.cs @@ -72,7 +72,7 @@ public static IServiceCollection AddAzureStorage(this IServiceCollection service var options = new AzureStorageOptions(); action.Invoke(options); CheckConfiguration(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { @@ -88,7 +88,7 @@ public static IServiceCollection AddAzureStorageAsDefault(this IServiceCollectio var options = new AzureStorageOptions(); action.Invoke(options); CheckConfiguration(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { diff --git a/Storages/ManagedCode.Storage.Azure/Extensions/StorageFactoryExtensions.cs b/Storages/ManagedCode.Storage.Azure/Extensions/StorageFactoryExtensions.cs index bf20f1d..3f2b49e 100644 --- a/Storages/ManagedCode.Storage.Azure/Extensions/StorageFactoryExtensions.cs +++ b/Storages/ManagedCode.Storage.Azure/Extensions/StorageFactoryExtensions.cs @@ -8,17 +8,17 @@ public static class StorageFactoryExtensions { public static IAzureStorage CreateAzureStorage(this IStorageFactory factory, string containerName) { - return factory.CreateStorage(options => options.Container = containerName); + return factory.CreateStorage(options => options.Container = containerName); } - + public static IAzureStorage CreateAzureStorage(this IStorageFactory factory, IAzureStorageOptions options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } - - + + public static IAzureStorage CreateAzureStorage(this 
IStorageFactory factory, Action options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } } \ No newline at end of file diff --git a/Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitClient.cs b/Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitClient.cs index 52b00d0..f5f9ffb 100644 --- a/Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitClient.cs +++ b/Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitClient.cs @@ -21,6 +21,7 @@ public sealed class CloudKitClient : ICloudKitClient, IDisposable private readonly CloudKitStorageOptions _options; private readonly HttpClient _httpClient; private readonly bool _ownsHttpClient; + private readonly SemaphoreSlim _webAuthTokenSemaphore = new(1, 1); private readonly JsonSerializerOptions _jsonOptions = new() { PropertyNamingPolicy = JsonNamingPolicy.CamelCase, @@ -80,7 +81,7 @@ public async Task DeleteAsync(string recordName, CancellationToken cancell } }; - var document = await SendCloudKitAsync("records/modify", payload, cancellationToken); + using var document = await SendCloudKitAsync("records/modify", payload, cancellationToken); if (TryGetRecordErrorCode(document.RootElement, out var errorCode)) { if (errorCode == "NOT_FOUND") @@ -113,7 +114,7 @@ public async Task ExistsAsync(string recordName, CancellationToken cancell ["desiredKeys"] = new[] { _options.PathFieldName, _options.ContentTypeFieldName, _options.AssetFieldName } }; - var document = await SendCloudKitAsync("records/lookup", payload, cancellationToken); + using var document = await SendCloudKitAsync("records/lookup", payload, cancellationToken); if (TryGetRecordErrorCode(document.RootElement, out var errorCode)) { if (errorCode == "NOT_FOUND") @@ -167,7 +168,7 @@ public async IAsyncEnumerable QueryByPathPrefixAsync(string path payload["continuationMarker"] = marker; } - var document = await SendCloudKitAsync("records/query", payload, cancellationToken); + using var document = await 
SendCloudKitAsync("records/query", payload, cancellationToken); if (document.RootElement.TryGetProperty("records", out var records) && records.ValueKind == JsonValueKind.Array) { foreach (var record in records.EnumerateArray()) @@ -201,7 +202,7 @@ private async Task GetAssetUploadUrlAsync(string recordName, CancellationTo } }; - var document = await SendCloudKitAsync("assets/upload", payload, cancellationToken); + using var document = await SendCloudKitAsync("assets/upload", payload, cancellationToken); if (!document.RootElement.TryGetProperty("tokens", out var tokens) || tokens.ValueKind != JsonValueKind.Array) { throw new InvalidOperationException("CloudKit assets/upload response does not include tokens."); @@ -272,7 +273,7 @@ private async Task UpsertRecordAsync(string recordName, string i } }; - var document = await SendCloudKitAsync("records/modify", payload, cancellationToken); + using var document = await SendCloudKitAsync("records/modify", payload, cancellationToken); if (TryGetRecordErrorCode(document.RootElement, out var errorCode)) { throw new InvalidOperationException($"CloudKit modify failed with error code '{errorCode}'."); @@ -293,6 +294,24 @@ private async Task UpsertRecordAsync(string recordName, string i } private async Task SendCloudKitAsync(string operation, object payload, CancellationToken cancellationToken) + { + if (!string.IsNullOrWhiteSpace(_options.WebAuthToken)) + { + await _webAuthTokenSemaphore.WaitAsync(cancellationToken); + try + { + return await SendCloudKitCoreAsync(operation, payload, cancellationToken); + } + finally + { + _webAuthTokenSemaphore.Release(); + } + } + + return await SendCloudKitCoreAsync(operation, payload, cancellationToken); + } + + private async Task SendCloudKitCoreAsync(string operation, object payload, CancellationToken cancellationToken) { var subpath = BuildSubpath(operation); var uri = BuildUri(subpath); @@ -313,7 +332,36 @@ private async Task SendCloudKitAsync(string operation, object payl throw new 
HttpRequestException($"CloudKit request failed: {(int)response.StatusCode} {response.ReasonPhrase}", null, response.StatusCode); } - return JsonDocument.Parse(json); + var document = JsonDocument.Parse(json); + TryRotateWebAuthToken(document.RootElement); + return document; + } + + private void TryRotateWebAuthToken(JsonElement response) + { + if (string.IsNullOrWhiteSpace(_options.WebAuthToken)) + { + return; + } + + // CloudKit Web Services rotates ckWebAuthToken on each response when it was provided on the request. + if (response.ValueKind != JsonValueKind.Object || !response.TryGetProperty("ckWebAuthToken", out var tokenElement)) + { + return; + } + + if (tokenElement.ValueKind != JsonValueKind.String) + { + return; + } + + var rotated = tokenElement.GetString(); + if (string.IsNullOrWhiteSpace(rotated) || string.Equals(rotated, _options.WebAuthToken, StringComparison.Ordinal)) + { + return; + } + + _options.WebAuthToken = rotated; } private async Task DownloadFromUrlAsync(Uri downloadUrl, CancellationToken cancellationToken) diff --git a/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs b/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs index 63bd678..83bb4bf 100644 --- a/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs +++ b/Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs @@ -39,7 +39,8 @@ public class CloudKitStorageOptions : IStorageOptions public string? ApiToken { get; set; } /// - /// Optional user authentication token (ckWebAuthToken) for private database access. + /// Optional user authentication token (ckWebAuthToken) for user-scoped CloudKit requests. + /// Note: CloudKit rotates this token on each request; callers should treat it as single-use and persist the rotated value. /// public string? WebAuthToken { get; set; } @@ -63,4 +64,3 @@ public class CloudKitStorageOptions : IStorageOptions /// public ICloudKitClient? 
Client { get; set; } } - diff --git a/Storages/ManagedCode.Storage.FileSystem/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.FileSystem/Extensions/ServiceCollectionExtensions.cs index d5e465d..63be84e 100644 --- a/Storages/ManagedCode.Storage.FileSystem/Extensions/ServiceCollectionExtensions.cs +++ b/Storages/ManagedCode.Storage.FileSystem/Extensions/ServiceCollectionExtensions.cs @@ -43,14 +43,14 @@ public static IServiceCollection AddFileSystemStorage(this IServiceCollection se { var options = new FileSystemStorageOptions(); action.Invoke(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { var opts = sp.GetKeyedService(k); return new FileSystemStorage(opts); }); - + return serviceCollection; } @@ -58,16 +58,16 @@ public static IServiceCollection AddFileSystemStorageAsDefault(this IServiceColl { var options = new FileSystemStorageOptions(); action.Invoke(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { var opts = sp.GetKeyedService(k); return new FileSystemStorage(opts); }); - serviceCollection.AddKeyedSingleton(key, (sp, k) => + serviceCollection.AddKeyedSingleton(key, (sp, k) => sp.GetRequiredKeyedService(k)); - + return serviceCollection; } } \ No newline at end of file diff --git a/Storages/ManagedCode.Storage.FileSystem/Extensions/StorageFactoryExtensions.cs b/Storages/ManagedCode.Storage.FileSystem/Extensions/StorageFactoryExtensions.cs index b021ffb..d5cbde6 100644 --- a/Storages/ManagedCode.Storage.FileSystem/Extensions/StorageFactoryExtensions.cs +++ b/Storages/ManagedCode.Storage.FileSystem/Extensions/StorageFactoryExtensions.cs @@ -8,17 +8,17 @@ public static class StorageFactoryExtensions { public static IFileSystemStorage CreateFileSystemStorage(this IStorageFactory factory, string baseFolder) { - return factory.CreateStorage(options => options.BaseFolder = baseFolder); + return 
factory.CreateStorage(options => options.BaseFolder = baseFolder); } - + public static IFileSystemStorage CreateFileSystemStorage(this IStorageFactory factory, FileSystemStorageOptions options) { return factory.CreateStorage(options); } - - + + public static IFileSystemStorage CreateFileSystemStorage(this IStorageFactory factory, Action options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } } \ No newline at end of file diff --git a/Storages/ManagedCode.Storage.FileSystem/FileSystemStorage.cs b/Storages/ManagedCode.Storage.FileSystem/FileSystemStorage.cs index 71676e9..f29afdd 100644 --- a/Storages/ManagedCode.Storage.FileSystem/FileSystemStorage.cs +++ b/Storages/ManagedCode.Storage.FileSystem/FileSystemStorage.cs @@ -39,7 +39,7 @@ public override async IAsyncEnumerable GetBlobMetadataListAsync(st [EnumeratorCancellation] CancellationToken cancellationToken = default) { await EnsureContainerExist(cancellationToken); - + if (cancellationToken.IsCancellationRequested) yield break; diff --git a/Storages/ManagedCode.Storage.FileSystem/FileSystemStorageProvider.cs b/Storages/ManagedCode.Storage.FileSystem/FileSystemStorageProvider.cs index c8f987d..3ac729c 100644 --- a/Storages/ManagedCode.Storage.FileSystem/FileSystemStorageProvider.cs +++ b/Storages/ManagedCode.Storage.FileSystem/FileSystemStorageProvider.cs @@ -9,9 +9,9 @@ namespace ManagedCode.Storage.FileSystem public class FileSystemStorageProvider(IServiceProvider serviceProvider, FileSystemStorageOptions defaultOptions) : IStorageProvider { public Type StorageOptionsType => typeof(FileSystemStorageOptions); - - public TStorage CreateStorage(TOptions options) - where TStorage : class, IStorage + + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage where TOptions : class, IStorageOptions { if (options is not FileSystemStorageOptions azureOptions) @@ -22,7 +22,7 @@ public TStorage CreateStorage(TOptions options) //var logger = 
serviceProvider.GetService>(); var storage = new FileSystemStorage(azureOptions); - return storage as TStorage + return storage as TStorage ?? throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); } diff --git a/Storages/ManagedCode.Storage.Google/Extensions/ServiceCollectionExtensions.cs b/Storages/ManagedCode.Storage.Google/Extensions/ServiceCollectionExtensions.cs index c995098..e005e3b 100644 --- a/Storages/ManagedCode.Storage.Google/Extensions/ServiceCollectionExtensions.cs +++ b/Storages/ManagedCode.Storage.Google/Extensions/ServiceCollectionExtensions.cs @@ -53,7 +53,7 @@ public static IServiceCollection AddGCPStorage(this IServiceCollection serviceCo var options = new GCPStorageOptions(); action.Invoke(options); CheckConfiguration(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { @@ -69,7 +69,7 @@ public static IServiceCollection AddGCPStorageAsDefault(this IServiceCollection var options = new GCPStorageOptions(); action.Invoke(options); CheckConfiguration(options); - + serviceCollection.AddKeyedSingleton(key, options); serviceCollection.AddKeyedSingleton(key, (sp, k) => { diff --git a/Storages/ManagedCode.Storage.Google/Extensions/StorageFactoryExtensions.cs b/Storages/ManagedCode.Storage.Google/Extensions/StorageFactoryExtensions.cs index 0709383..1328b3b 100644 --- a/Storages/ManagedCode.Storage.Google/Extensions/StorageFactoryExtensions.cs +++ b/Storages/ManagedCode.Storage.Google/Extensions/StorageFactoryExtensions.cs @@ -8,17 +8,17 @@ public static class StorageFactoryExtensions { public static IGCPStorage CreateGCPStorage(this IStorageFactory factory, string containerName) { - return factory.CreateStorage(options => options.BucketOptions.Bucket = containerName); + return factory.CreateStorage(options => options.BucketOptions.Bucket = containerName); } - + public static IGCPStorage CreateGCPStorage(this IStorageFactory factory, GCPStorageOptions 
options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } - - + + public static IGCPStorage CreateGCPStorage(this IStorageFactory factory, Action options) { - return factory.CreateStorage(options); + return factory.CreateStorage(options); } } \ No newline at end of file diff --git a/Storages/ManagedCode.Storage.Google/GCPStorage.cs b/Storages/ManagedCode.Storage.Google/GCPStorage.cs index b880901..f511839 100644 --- a/Storages/ManagedCode.Storage.Google/GCPStorage.cs +++ b/Storages/ManagedCode.Storage.Google/GCPStorage.cs @@ -22,7 +22,7 @@ public class GCPStorage : BaseStorage, IGCPSto { private readonly ILogger? _logger; private readonly UrlSigner urlSigner; - + public GCPStorage(GCPStorageOptions options, ILogger? logger = null) : base(options) { _logger = logger; @@ -55,9 +55,9 @@ public override async IAsyncEnumerable GetBlobMetadataListAsync(st await foreach (var item in pages.WithCancellation(cancellationToken)) { - if(item is null) + if (item is null) continue; - + if (cancellationToken.IsCancellationRequested) yield break; @@ -77,19 +77,19 @@ public override async IAsyncEnumerable GetBlobMetadataListAsync(st Metadata = item.Metadata?.ToDictionary(k => k.Key, v => v.Value) ?? 
new Dictionary() }; - + } catch (Exception e) { Console.WriteLine(e); throw; } - + yield return blobMetadata; } } - - + + public override async Task> GetStreamAsync(string fileName, CancellationToken cancellationToken = default) { @@ -172,7 +172,7 @@ await StorageClient.CreateBucketAsync(StorageOptions.BucketOptions.ProjectId, St } cancellationToken.ThrowIfCancellationRequested(); - + return Result.Succeed(); } catch (GoogleApiException exception) when (exception.HttpStatusCode is HttpStatusCode.Conflict) @@ -191,21 +191,21 @@ protected override async Task DeleteDirectoryInternalAsync(string direct try { await EnsureContainerExist(cancellationToken); - + var blobs = StorageClient.ListObjectsAsync(StorageOptions.BucketOptions.Bucket, string.Empty, new ListObjectsOptions { Projection = Projection.Full }) .Select(x => x); cancellationToken.ThrowIfCancellationRequested(); - + await foreach (var blob in blobs.WithCancellation(cancellationToken)) { cancellationToken.ThrowIfCancellationRequested(); await StorageClient.DeleteObjectAsync(blob, cancellationToken: cancellationToken); } - + cancellationToken.ThrowIfCancellationRequested(); - + return Result.Succeed(); } catch (Exception ex) @@ -224,12 +224,12 @@ protected override async Task> UploadInternalAsync(Stream s var result = await StorageClient.UploadObjectAsync(StorageOptions.BucketOptions.Bucket, options.FullPath, options.MimeType, stream, null, cancellationToken); - + cancellationToken.ThrowIfCancellationRequested(); var metadataOptions = MetadataOptions.FromBaseOptions(options); metadataOptions.ETag = result.ETag; - + return await GetBlobMetadataInternalAsync(metadataOptions, cancellationToken); } catch (Exception ex) @@ -288,7 +288,7 @@ protected override async Task> ExistsInternalAsync(ExistOptions opt cancellationToken.ThrowIfCancellationRequested(); await StorageClient.GetObjectAsync(StorageOptions.BucketOptions.Bucket, options.FullPath, null, cancellationToken); 
cancellationToken.ThrowIfCancellationRequested(); - + //TODO: check logic return Result.Succeed(true); } @@ -362,7 +362,7 @@ protected override async Task> HasLegalHoldInternalAsync(LegalHoldO await EnsureContainerExist(cancellationToken); var storageObject = await StorageClient.GetObjectAsync(StorageOptions.BucketOptions.Bucket, options.FullPath, cancellationToken: cancellationToken); - + cancellationToken.ThrowIfCancellationRequested(); return Result.Succeed(storageObject.TemporaryHold ?? false); @@ -373,7 +373,7 @@ protected override async Task> HasLegalHoldInternalAsync(LegalHoldO return Result.Fail(ex); } } - + public static T GetFirstSuccessfulValue(T defaultValue, params Func[] getters) where T : struct { foreach (var getter in getters) diff --git a/Storages/ManagedCode.Storage.Google/GCPStorageProvider.cs b/Storages/ManagedCode.Storage.Google/GCPStorageProvider.cs index cf36f09..d670ff0 100644 --- a/Storages/ManagedCode.Storage.Google/GCPStorageProvider.cs +++ b/Storages/ManagedCode.Storage.Google/GCPStorageProvider.cs @@ -11,9 +11,9 @@ namespace ManagedCode.Storage.Google public class GCPStorageProvider(IServiceProvider serviceProvider, GCPStorageOptions defaultOptions) : IStorageProvider { public Type StorageOptionsType => typeof(GCPStorageOptions); - - public TStorage CreateStorage(TOptions options) - where TStorage : class, IStorage + + public TStorage CreateStorage(TOptions options) + where TStorage : class, IStorage where TOptions : class, IStorageOptions { if (options is not GCPStorageOptions azureOptions) @@ -24,7 +24,7 @@ public TStorage CreateStorage(TOptions options) var logger = serviceProvider.GetService>(); var storage = new GCPStorage(azureOptions, logger); - return storage as TStorage + return storage as TStorage ?? 
throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}"); } @@ -35,7 +35,7 @@ public IStorageOptions GetDefaultOptions() AuthFileName = defaultOptions.AuthFileName, BucketOptions = new BucketOptions { - Bucket = defaultOptions.BucketOptions.Bucket, + Bucket = defaultOptions.BucketOptions.Bucket, ProjectId = defaultOptions.BucketOptions.ProjectId }, GoogleCredential = defaultOptions.GoogleCredential, diff --git a/Tests/ManagedCode.Storage.Tests/Common/EmptyContainer.cs b/Tests/ManagedCode.Storage.Tests/Common/EmptyContainer.cs index 3ef11d8..48d72d6 100644 --- a/Tests/ManagedCode.Storage.Tests/Common/EmptyContainer.cs +++ b/Tests/ManagedCode.Storage.Tests/Common/EmptyContainer.cs @@ -147,7 +147,7 @@ public Task CopyAsync(FileInfo source, string target, public TestcontainersStates State { get; } = TestcontainersStates.Running; public TestcontainersHealthStatus Health { get; } = TestcontainersHealthStatus.Healthy; public long HealthCheckFailingStreak { get; } = 0; - #pragma warning disable CS0067 +#pragma warning disable CS0067 public event EventHandler? Creating; public event EventHandler? Starting; public event EventHandler? Stopping; @@ -158,5 +158,5 @@ public Task CopyAsync(FileInfo source, string target, public event EventHandler? Stopped; public event EventHandler? Paused; public event EventHandler? 
Unpaused; - #pragma warning restore CS0067 +#pragma warning restore CS0067 } diff --git a/Tests/ManagedCode.Storage.Tests/Common/FileHelper.cs b/Tests/ManagedCode.Storage.Tests/Common/FileHelper.cs index 7621087..6929b3d 100644 --- a/Tests/ManagedCode.Storage.Tests/Common/FileHelper.cs +++ b/Tests/ManagedCode.Storage.Tests/Common/FileHelper.cs @@ -10,7 +10,7 @@ namespace ManagedCode.Storage.Tests.Common; public static class FileHelper { private static readonly Random Random = new(); - + public static LocalFile GenerateLocalFile(LocalFile localFile, int byteSize) { var fs = localFile.FileStream; @@ -78,14 +78,14 @@ public static IFormFile GenerateFormFile(string fileName, int byteSize) return formFile; } - + public static string GenerateRandomFileName() { string[] extensions = { "txt", "jpg", "png", "pdf", "docx", "xlsx", "pptx", "mp3", "mp4", "zip" }; var randomExtension = extensions[Random.Next(extensions.Length)]; return $"{Guid.NewGuid().ToString("N").ToLowerInvariant()}.{randomExtension}"; } - + public static string GenerateRandomFileContent(int charCount = 250_000) { const string chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZ-0123456789_abcdefghijklmnopqrstuvwxyz"; diff --git a/Tests/ManagedCode.Storage.Tests/Common/StorageTestApplication.cs b/Tests/ManagedCode.Storage.Tests/Common/StorageTestApplication.cs index 547c710..d886a1e 100644 --- a/Tests/ManagedCode.Storage.Tests/Common/StorageTestApplication.cs +++ b/Tests/ManagedCode.Storage.Tests/Common/StorageTestApplication.cs @@ -98,10 +98,10 @@ protected override IHost CreateHost(IHostBuilder builder) } }); - + var config = new AmazonS3Config(); config.ServiceURL = _localStackContainer.GetConnectionString(); - + services.AddAWSStorage(new AWSStorageOptions { PublicKey = "localkey", diff --git a/Tests/ManagedCode.Storage.Tests/Common/TestApp/HttpHostProgram.cs b/Tests/ManagedCode.Storage.Tests/Common/TestApp/HttpHostProgram.cs index 3a98c35..7baaaaf 100644 --- 
a/Tests/ManagedCode.Storage.Tests/Common/TestApp/HttpHostProgram.cs +++ b/Tests/ManagedCode.Storage.Tests/Common/TestApp/HttpHostProgram.cs @@ -34,7 +34,7 @@ public static void Main(string[] args) options.MultipartBodyLengthLimit = long.MaxValue; options.MultipartHeadersLengthLimit = int.MaxValue; }); - + var app = builder.Build(); diff --git a/Tests/ManagedCode.Storage.Tests/Constants/ApiEndpoints.cs b/Tests/ManagedCode.Storage.Tests/Constants/ApiEndpoints.cs index caed8a9..66c6dcf 100644 --- a/Tests/ManagedCode.Storage.Tests/Constants/ApiEndpoints.cs +++ b/Tests/ManagedCode.Storage.Tests/Constants/ApiEndpoints.cs @@ -3,7 +3,7 @@ public static class ApiEndpoints { public const string Azure = "azure"; - + public static class Base { public const string UploadFile = "{0}/upload"; diff --git a/Tests/ManagedCode.Storage.Tests/Core/StringStreamTests.cs b/Tests/ManagedCode.Storage.Tests/Core/StringStreamTests.cs index def8529..17d3039 100644 --- a/Tests/ManagedCode.Storage.Tests/Core/StringStreamTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Core/StringStreamTests.cs @@ -18,10 +18,10 @@ public void StringStream_EmptyString_ShouldWork() { // Arrange var input = ""; - + // Act using var stream = new StringStream(input); - + // Assert stream.CanRead.ShouldBeTrue(); stream.CanSeek.ShouldBeTrue(); @@ -35,10 +35,10 @@ public void StringStream_SimpleString_ShouldWork() { // Arrange var input = "Hello"; - + // Act using var stream = new StringStream(input); - + // Assert stream.Length.ShouldBe(10); // 5 chars * 2 bytes each in old implementation stream.ToString().ShouldBe(input); @@ -50,15 +50,15 @@ public void StringStream_ReadByte_ShouldWork() // Arrange var input = "A"; using var stream = new StringStream(input); - + // Act var firstByte = stream.ReadByte(); var secondByte = stream.ReadByte(); var thirdByte = stream.ReadByte(); // Should be EOF - + // Assert firstByte.ShouldNotBe(-1); - secondByte.ShouldNotBe(-1); + secondByte.ShouldNotBe(-1); thirdByte.ShouldBe(-1); // EOF } 
@@ -67,10 +67,10 @@ public void Utf8StringStream_EmptyString_ShouldWork() { // Arrange var input = ""; - + // Act using var stream = new Utf8StringStream(input); - + // Assert stream.CanRead.ShouldBeTrue(); stream.CanSeek.ShouldBeTrue(); @@ -84,10 +84,10 @@ public void Utf8StringStream_SimpleString_ShouldWork() { // Arrange var input = "Hello"; - + // Act using var stream = new Utf8StringStream(input); - + // Assert stream.Length.ShouldBe(5); // 5 ASCII chars = 5 bytes in UTF-8 stream.ToString().ShouldBe(input); @@ -98,10 +98,10 @@ public void Utf8StringStream_UnicodeString_ShouldWork() { // Arrange var input = "🚀"; // This emoji is 4 bytes in UTF-8 - + // Act using var stream = new Utf8StringStream(input); - + // Assert stream.Length.ShouldBe(4); // Emoji = 4 bytes in UTF-8 stream.ToString().ShouldBe(input); @@ -114,11 +114,11 @@ public void Utf8StringStream_ReadAllBytes_ShouldMatchOriginal() var input = "Test 123"; using var stream = new Utf8StringStream(input); var expectedBytes = Encoding.UTF8.GetBytes(input); - + // Act var buffer = new byte[stream.Length]; var bytesRead = stream.Read(buffer, 0, buffer.Length); - + // Assert bytesRead.ShouldBe(expectedBytes.Length); buffer.ShouldBe(expectedBytes); @@ -131,11 +131,11 @@ public async Task Utf8StringStream_ReadAsync_ShouldWork() var input = "Async test"; using var stream = new Utf8StringStream(input); var expectedBytes = Encoding.UTF8.GetBytes(input); - + // Act var buffer = new byte[stream.Length]; var bytesRead = await stream.ReadAsync(buffer); - + // Assert bytesRead.ShouldBe(expectedBytes.Length); buffer.ShouldBe(expectedBytes); @@ -147,14 +147,14 @@ public void Utf8StringStream_Seek_ShouldWork() // Arrange var input = "Seek test"; using var stream = new Utf8StringStream(input); - + // Act & Assert stream.Seek(0, SeekOrigin.Begin).ShouldBe(0); stream.Position.ShouldBe(0); - + stream.Seek(5, SeekOrigin.Begin).ShouldBe(5); stream.Position.ShouldBe(5); - + stream.Seek(0, SeekOrigin.End).ShouldBe(stream.Length); 
stream.Position.ShouldBe(stream.Length); } @@ -165,7 +165,7 @@ public void Utf8StringStream_WriteOperations_ShouldThrow() // Arrange using var stream = new Utf8StringStream("test"); var buffer = new byte[5]; - + // Act & Assert var act1 = () => stream.Write(buffer, 0, buffer.Length); Should.Throw(act1); @@ -179,11 +179,11 @@ public void Utf8StringStream_ExtensionMethods_ShouldWork() { // Arrange var input = "Extension test"; - + // Act using var stream1 = input.ToUtf8Stream(); using var stream2 = Encoding.UTF8.GetBytes(input).ToUtf8Stream(); - + // Assert stream1.ToString().ShouldBe(input); stream2.ToString().ShouldBe(input); @@ -199,7 +199,7 @@ public void Utf8StringStream_VariousInputs_ShouldPreserveContent(string input) { // Act using var stream = new Utf8StringStream(input); - + // Assert stream.ToString().ShouldBe(input); stream.Length.ShouldBe(Encoding.UTF8.GetByteCount(input)); @@ -210,11 +210,11 @@ public void StringStreams_MemoryComparison_Utf8ShouldBeMoreEfficient() { // Arrange var input = "Memory test 🚀"; // Contains Unicode - + // Act using var oldStream = new StringStream(input); using var newStream = new Utf8StringStream(input); - + // Assert newStream.Length.ShouldBeLessThanOrEqualTo(oldStream.Length); oldStream.ToString().ShouldBe(newStream.ToString()); diff --git a/Tests/ManagedCode.Storage.Tests/ExtensionsTests/FormFileExtensionsTests.cs b/Tests/ManagedCode.Storage.Tests/ExtensionsTests/FormFileExtensionsTests.cs index 291f994..337ac2b 100644 --- a/Tests/ManagedCode.Storage.Tests/ExtensionsTests/FormFileExtensionsTests.cs +++ b/Tests/ManagedCode.Storage.Tests/ExtensionsTests/FormFileExtensionsTests.cs @@ -44,7 +44,7 @@ public async Task ToLocalFileAsync_LargeFile() localFile.FileStream.Length.ShouldBe(formFile.Length); Path.GetExtension(localFile.Name).ShouldBe(Path.GetExtension(formFile.FileName)); } - + [Fact] public async Task ToLocalFilesAsync_SmallFiles() { diff --git 
a/Tests/ManagedCode.Storage.Tests/ExtensionsTests/StorageExtensionsTests.cs b/Tests/ManagedCode.Storage.Tests/ExtensionsTests/StorageExtensionsTests.cs index 6877210..f4f21a7 100644 --- a/Tests/ManagedCode.Storage.Tests/ExtensionsTests/StorageExtensionsTests.cs +++ b/Tests/ManagedCode.Storage.Tests/ExtensionsTests/StorageExtensionsTests.cs @@ -176,21 +176,23 @@ public void MultipleStorages_WithDifferentKeys() { // Arrange var services = new ServiceCollection(); - - services.AddFileSystemStorage("storage1", opt => { - opt.BaseFolder = Path.Combine(Environment.CurrentDirectory, "managed-code-bucket-1"); + + services.AddFileSystemStorage("storage1", opt => + { + opt.BaseFolder = Path.Combine(Environment.CurrentDirectory, "managed-code-bucket-1"); }); - - services.AddFileSystemStorage("storage2", opt => { - opt.BaseFolder = Path.Combine(Environment.CurrentDirectory, "managed-code-bucket-2"); + + services.AddFileSystemStorage("storage2", opt => + { + opt.BaseFolder = Path.Combine(Environment.CurrentDirectory, "managed-code-bucket-2"); }); - + var provider = services.BuildServiceProvider(); - + // Act var storage1 = provider.GetKeyedService("storage1"); var storage2 = provider.GetKeyedService("storage2"); - + // Assert storage1.ShouldNotBeNull(); storage2.ShouldNotBeNull(); diff --git a/Tests/ManagedCode.Storage.Tests/ExtensionsTests/StoragePrivderExtensionsTests.cs b/Tests/ManagedCode.Storage.Tests/ExtensionsTests/StoragePrivderExtensionsTests.cs index 5366745..81c6bdc 100644 --- a/Tests/ManagedCode.Storage.Tests/ExtensionsTests/StoragePrivderExtensionsTests.cs +++ b/Tests/ManagedCode.Storage.Tests/ExtensionsTests/StoragePrivderExtensionsTests.cs @@ -35,7 +35,7 @@ public StorageFactoryTests() { ServiceProvider = ConfigureServices(); } - + public ServiceProvider ServiceProvider { get; } public static ServiceProvider ConfigureServices() @@ -46,13 +46,13 @@ public static ServiceProvider ConfigureServices() { BaseFolder = Path.Combine(Environment.CurrentDirectory, 
"managed-code-bucket") }); - + services.AddAzureStorage(new AzureStorageOptions { Container = "managed-code-bucket", ConnectionString = "UseDevelopmentStorage=true" }); - + services.AddGCPStorage(new GCPStorageOptions { BucketOptions = new BucketOptions @@ -67,10 +67,10 @@ public static ServiceProvider ConfigureServices() } }); - + var config = new AmazonS3Config(); config.ServiceURL = "http://localhost:4443"; - + services.AddAWSStorage(new AWSStorageOptions { PublicKey = "localkey", @@ -97,7 +97,7 @@ public void CreateAzureStorage() }); storage.GetType().ShouldBe(typeof(AzureStorage)); } - + [Fact] public void CreateAwsStorage() { @@ -133,7 +133,7 @@ public void CreateGcpStorage() }); storage.GetType().ShouldBe(typeof(GCPStorage)); } - + [Fact] public void UpdateAzureStorage() { @@ -146,7 +146,7 @@ public void UpdateAzureStorage() .ShouldBe(containerName); } - + [Fact] public void UpdateAwsStorage() { @@ -156,7 +156,7 @@ public void UpdateAwsStorage() storage.StorageClient .ShouldNotBeNull(); } - + [Fact] public void UpdateGcpStorage() { @@ -166,6 +166,6 @@ public void UpdateGcpStorage() storage.StorageClient .ShouldNotBeNull(); } - + } diff --git a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/ContainerTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/ContainerTests.cs index 7fe67b6..3e0e4ff 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/ContainerTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/ContainerTests.cs @@ -65,7 +65,7 @@ public async Task DeleteDirectory_ShouldBeSuccess() // Assert result.IsSuccess .ShouldBeTrue(result.Problem?.Detail ?? 
"Failed without details"); - + blobs.Count .ShouldBe(0); } diff --git a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs index d9af1eb..c0ce567 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs @@ -256,8 +256,8 @@ public async Task UploadAsync_WithCancellationToken_ShouldCancel() result.IsSuccess .ShouldBeFalse(); } - - + + [Fact] public virtual async Task UploadAsync_WithCancellationToken_BigFile_ShouldCancel() { @@ -282,7 +282,7 @@ public virtual async Task UploadAsync_WithCancellationToken_BigFile_ShouldCancel // Assert uploadResult.IsSuccess .ShouldBeFalse(); - - + + } } diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs index be986e1..221f2c8 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs @@ -69,4 +69,38 @@ public async Task CloudKitClient_WithHttpHandler_RoundTrip() (await client.ExistsAsync(recordName, CancellationToken.None)).ShouldBeFalse(); (await client.DeleteAsync(recordName, CancellationToken.None)).ShouldBeFalse(); } + + [Fact] + public async Task CloudKitClient_WithWebAuthToken_ShouldRotateTokenAcrossRequests() + { + var handler = new FakeCloudKitHttpHandler(); + var httpClient = new HttpClient(handler); + + var options = new CloudKitStorageOptions + { + ContainerId = "iCloud.com.example.app", + Environment = CloudKitEnvironment.Development, + Database = CloudKitDatabase.Public, + ApiToken = "test-token", + WebAuthToken = "initial-web-token", + RecordType = "MCStorageFile", + PathFieldName = "path", + ContentTypeFieldName = "contentType", + AssetFieldName = "file" + }; + + using var client = new CloudKitClient(options, 
httpClient); + + await using (var uploadStream = new MemoryStream(Encoding.UTF8.GetBytes("cloudkit payload"))) + { + _ = await client.UploadAsync("record-rotating", "app-data/rotating.txt", uploadStream, "text/plain", CancellationToken.None); + } + + options.WebAuthToken.ShouldNotBeNull(); + options.WebAuthToken.ShouldNotBe("initial-web-token"); + options.WebAuthToken.ShouldStartWith("web-token-"); + + (await client.ExistsAsync("record-rotating", CancellationToken.None)).ShouldBeTrue(); + options.WebAuthToken.ShouldStartWith("web-token-"); + } } diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs index 1b7adff..9b47558 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs @@ -17,6 +17,8 @@ internal sealed class FakeCloudKitHttpHandler : HttpMessageHandler private readonly Dictionary _records = new(StringComparer.OrdinalIgnoreCase); private readonly Dictionary _uploads = new(StringComparer.OrdinalIgnoreCase); + private string? _expectedWebAuthToken; + private int _webAuthTokenCounter; protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) { @@ -45,6 +47,13 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r return new HttpResponseMessage(HttpStatusCode.MethodNotAllowed); } + var query = ParseQuery(request.RequestUri?.Query); + var (rotatedWebAuthToken, webAuthError) = ValidateAndRotateWebAuthToken(query); + if (webAuthError != null) + { + return webAuthError; + } + if (path.EndsWith("/assets/upload", StringComparison.OrdinalIgnoreCase)) { var body = await request.Content!.ReadAsStringAsync(cancellationToken); @@ -53,9 +62,9 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r var recordName = token.GetProperty("recordName").GetString() ?? 
string.Empty; var fieldName = token.GetProperty("fieldName").GetString() ?? "file"; - return JsonResponse(new + return JsonResponseWithToken(new Dictionary { - tokens = new[] + ["tokens"] = new[] { new { @@ -64,7 +73,7 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r url = $"https://{AssetsHost}/upload/{recordName}" } } - }); + }, rotatedWebAuthToken); } if (path.EndsWith("/records/modify", StringComparison.OrdinalIgnoreCase)) @@ -80,12 +89,12 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r { if (_records.Remove(recordName)) { - return JsonResponse(new { }); + return JsonResponseWithToken(new Dictionary(), rotatedWebAuthToken); } - return JsonResponse(new + return JsonResponseWithToken(new Dictionary { - errors = new[] + ["errors"] = new[] { new { @@ -93,7 +102,7 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r serverErrorCode = "NOT_FOUND" } } - }); + }, rotatedWebAuthToken); } if (!string.Equals(type, "forceUpdate", StringComparison.OrdinalIgnoreCase)) @@ -112,13 +121,13 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r var stored = new StoredRecord(recordName, recordType, internalPath, contentType, content, now, now); _records[recordName] = stored; - return JsonResponse(new + return JsonResponseWithToken(new Dictionary { - records = new[] + ["records"] = new[] { ToRecordResponse(stored) } - }); + }, rotatedWebAuthToken); } if (path.EndsWith("/records/lookup", StringComparison.OrdinalIgnoreCase)) @@ -129,9 +138,9 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r if (!_records.TryGetValue(recordName, out var stored)) { - return JsonResponse(new + return JsonResponseWithToken(new Dictionary { - errors = new[] + ["errors"] = new[] { new { @@ -139,16 +148,16 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r serverErrorCode = "NOT_FOUND" } } - }); + }, rotatedWebAuthToken); } - return JsonResponse(new + return JsonResponseWithToken(new Dictionary { - records = new[] + ["records"] = new[] 
{ ToRecordResponse(stored) } - }); + }, rotatedWebAuthToken); } if (path.EndsWith("/records/query", StringComparison.OrdinalIgnoreCase)) @@ -168,10 +177,10 @@ private async Task HandleCloudKitAsync(HttpRequestMessage r .Select(ToRecordResponse) .ToList(); - return JsonResponse(new + return JsonResponseWithToken(new Dictionary { - records = results - }); + ["records"] = results + }, rotatedWebAuthToken); } return new HttpResponseMessage(HttpStatusCode.NotFound) @@ -259,6 +268,61 @@ private static HttpResponseMessage JsonResponse(object payload, HttpStatusCode s return response; } + private static HttpResponseMessage JsonResponseWithToken(Dictionary payload, string? webAuthToken, HttpStatusCode statusCode = HttpStatusCode.OK) + { + if (!string.IsNullOrWhiteSpace(webAuthToken)) + { + payload["ckWebAuthToken"] = webAuthToken; + } + + return JsonResponse(payload, statusCode); + } + + private static Dictionary ParseQuery(string? query) + { + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + if (string.IsNullOrWhiteSpace(query)) + { + return result; + } + + foreach (var part in query.TrimStart('?').Split('&', StringSplitOptions.RemoveEmptyEntries)) + { + var kv = part.Split('=', 2); + var key = Uri.UnescapeDataString(kv[0]); + var value = kv.Length == 2 ? Uri.UnescapeDataString(kv[1]) : string.Empty; + result[key] = value; + } + + return result; + } + + private (string? RotatedToken, HttpResponseMessage? ErrorResponse) ValidateAndRotateWebAuthToken(Dictionary query) + { + if (!query.TryGetValue("ckWebAuthToken", out var token) || string.IsNullOrWhiteSpace(token)) + { + return (null, null); + } + + if (_expectedWebAuthToken == null) + { + _expectedWebAuthToken = token; + } + else if (!string.Equals(_expectedWebAuthToken, token, StringComparison.Ordinal)) + { + return (null, JsonResponse(new + { + uuid = Guid.NewGuid().ToString("N"), + serverErrorCode = "AUTHENTICATION_REQUIRED", + reason = "Invalid ckWebAuthToken." 
+ }, HttpStatusCode.Unauthorized)); + } + + var rotated = "web-token-" + Interlocked.Increment(ref _webAuthTokenCounter); + _expectedWebAuthToken = rotated; + return (rotated, null); + } + private sealed record StoredRecord( string RecordName, string RecordType, @@ -268,4 +332,3 @@ private sealed record StoredRecord( DateTimeOffset CreatedOn, DateTimeOffset LastModified); } - diff --git a/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs index 03d5817..d89529e 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs @@ -27,7 +27,7 @@ public async Task UploadAsync_AsStream_CorrectlyOverwritesFiles() { // Arrange - var uploadStream1 = new MemoryStream(90*1024); + var uploadStream1 = new MemoryStream(90 * 1024); var buffer = new byte[90 * 1024]; var random = new Random(); random.NextBytes(buffer); @@ -57,7 +57,7 @@ public async Task UploadAsync_AsStream_CorrectlyOverwritesFiles() }); downloadedResult.IsSuccess.ShouldBeTrue(); // size - downloadedResult.Value!.FileInfo.Length.ShouldBe(90*1024); + downloadedResult.Value!.FileInfo.Length.ShouldBe(90 * 1024); var secondResult = await Storage.UploadAsync(uploadStream2, options => diff --git a/Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSUploadTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSUploadTests.cs index 9ff67f5..0689d1a 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSUploadTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSUploadTests.cs @@ -27,6 +27,7 @@ protected override ServiceProvider ConfigureServices() [InlineData(5)] public override Task UploadAsync_LargeStream_ShouldRoundTrip(int gigabytes) { + _ = gigabytes; return Task.CompletedTask; } } diff --git a/docs/ADR/0001-icloud-drive-support.md b/docs/ADR/0001-icloud-drive-support.md new file mode 100644 
index 0000000..c1d2764 --- /dev/null +++ b/docs/ADR/0001-icloud-drive-support.md @@ -0,0 +1,73 @@ +# ADR 0001: iCloud Drive Support vs CloudKit (Server-side) + +## Status + +Accepted — 2025-12-14 + +## Context + +This repository provides a provider-agnostic `IStorage` abstraction for server-side and cross-platform .NET apps. + +We want an “Apple cloud storage” option that behaves like other providers (upload/download/list/delete/metadata) and can run: + +- in server environments (Linux containers, CI) and +- without requiring Apple-only runtime APIs. + +The user request is “iCloud” as a storage backend, ideally for file-like storage similar to OneDrive/Google Drive/Dropbox. + +## Problem + +Apple’s “iCloud Drive” is primarily exposed to Apple platform apps via OS-level document APIs and does **not** have a stable, public, server-side file API comparable to: + +- Microsoft Graph (OneDrive), +- Google Drive REST API, or +- Dropbox HTTP API. + +There are unofficial approaches (reverse-engineered iCloud.com traffic) but they: + +- are brittle and can break without notice, +- often require Apple ID + 2FA/session cookies (high operational/security risk), +- are not ideal for a general-purpose OSS storage provider. + +## Decision + +We will **not** implement an `iCloud Drive` provider in this repository unless Apple publishes and supports a proper server-side file API. + +Instead, we support **CloudKit Web Services** via `ManagedCode.Storage.CloudKit`, which is an official Apple service intended for app data stored in iCloud. + +CloudKit is not “iCloud Drive”, but it is the closest official server-side storage surface Apple exposes for app data. 
+ +```mermaid +flowchart TD + Want["Need server-side iCloud file storage"] --> CheckAPI{Official iCloud Drive file API?} + CheckAPI -->|No| CloudKit["Use CloudKit Web Services (ManagedCode.Storage.CloudKit)"] + CheckAPI -->|Yes| Future["Add iCloud Drive provider (future)"] + CloudKit --> Note["Document limitations: app data != iCloud Drive"] +``` + +## Consequences + +### Positive + +- Keeps the storage layer stable and supportable (no reverse-engineered contracts). +- Avoids unsafe auth patterns (Apple ID passwords, session scraping). +- Works in CI and server environments. + +### Negative + +- Users who explicitly need iCloud Drive (document storage visible in Finder/iOS Files) cannot use this library for that use-case. +- CloudKit has quotas and record/asset constraints that differ from “drive” providers. + +## Alternatives Considered + +1. **Unofficial iCloud Drive API wrappers** + - Rejected due to brittleness, auth complexity (2FA), and reliability/security risks. +2. **Apple platform-only implementation** + - Would not satisfy server-side scenarios and would complicate the provider model. + +## References (Internal) + +- `docs/Development/credentials.md` (CloudKit auth + setup) +- `docs/Features/provider-cloudkit.md` +- `README.md` (CloudKit section + explicit “not iCloud Drive” note) + diff --git a/docs/ADR/index.md b/docs/ADR/index.md new file mode 100644 index 0000000..bcf5f6f --- /dev/null +++ b/docs/ADR/index.md @@ -0,0 +1,20 @@ +# Architecture Decisions (ADR) + +Architecture Decision Records capture the **why** behind key technical choices. They are intentionally short, but must be specific enough that a future contributor can understand: + +- what problem we had, +- what options we considered, +- what we decided and why, +- what the consequences are. 
+ +```mermaid +flowchart LR + Problem[Problem] --> Options[Options] + Options --> Decision[Decision] + Decision --> Consequences[Consequences] + Consequences --> Code[Code + Tests] +``` + +## ADR List + +- [ADR 0001: iCloud Drive Support vs CloudKit (Server-side)](0001-icloud-drive-support.md) — iCloud Drive is not implemented; CloudKit is supported as the official server-side Apple option. diff --git a/docs/API/index.md b/docs/API/index.md new file mode 100644 index 0000000..7d195ea --- /dev/null +++ b/docs/API/index.md @@ -0,0 +1,14 @@ +# API + +ManagedCode.Storage exposes an HTTP + SignalR integration surface via `ManagedCode.Storage.Server`. + +```mermaid +flowchart LR + App[Client app] --> Http[HTTP Controllers] + App --> Hub[SignalR Hub] + Http --> Storage[IStorage] + Hub --> Storage + Storage --> Provider[Concrete storage provider] +``` + +- [Storage server (HTTP + SignalR)](storage-server.md) diff --git a/docs/API/storage-server.md b/docs/API/storage-server.md new file mode 100644 index 0000000..ccfef80 --- /dev/null +++ b/docs/API/storage-server.md @@ -0,0 +1,155 @@ +# API: Storage Server (HTTP + SignalR) + +This document describes the integration surface exposed by `Integraions/ManagedCode.Storage.Server`. + +## Request Flow + +```mermaid +flowchart LR + Client --> Controller[StorageControllerBase / StorageController] + Client --> Hub[StorageHubBase / StorageHub] + Controller --> Storage[IStorage] + Hub --> Storage + Storage --> Provider[Concrete provider] +``` + +## HTTP API + +Default controller: `Integraions/ManagedCode.Storage.Server/Controllers/StorageController.cs` + +Base route: + +- `/api/storage` + +### Upload (multipart) + +- `POST /api/storage/upload` +- Request: `multipart/form-data` with a single `IFormFile` field named `file` +- Response: `Result` + +Notes: + +- If `StorageServerOptions.EnableFileSizeValidation` is enabled, the controller enforces `MaxFileSize`. 
+ +```mermaid +sequenceDiagram + participant C as Client + participant API as POST /api/storage/upload + participant S as IStorage + C->>API: multipart/form-data (IFormFile) + API->>S: UploadAsync(stream, UploadOptions) + S-->>API: Result + API-->>C: Result +``` + +### Upload (stream) + +- `POST /api/storage/upload/stream` +- Request: raw body stream (`Request.Body`) +- Required headers: + - `X-File-Name` +- Optional headers: + - `X-Content-Type` + - `X-Directory` +- Response: `Result` + +```mermaid +sequenceDiagram + participant C as Client + participant API as POST /api/storage/upload/stream + participant S as IStorage + C->>API: Request.Body + headers (X-File-Name, X-Content-Type, X-Directory) + API->>S: UploadAsync(Request.Body, UploadOptions) + S-->>API: Result + API-->>C: Result +``` + +### Download (file response) + +- `GET /api/storage/download/{*path}` +- Response: `FileStreamResult` (`Content-Type` resolved via `MimeHelper`) +- Range requests: + - enabled when `StorageServerOptions.EnableRangeProcessing = true` + +### Stream (inline) + +- `GET /api/storage/stream/{*path}` +- Response: `FileStreamResult` without a `fileDownloadName` (useful for inline streaming) +- Range requests: + - enabled when `StorageServerOptions.EnableRangeProcessing = true` + +### Download as bytes + +- `GET /api/storage/download-bytes/{*path}` +- Response: `File(byte[])` + +### Chunked upload + +Chunk payload model: `Integraions/ManagedCode.Storage.Server/Models/FileUploadPayload.cs` + +- `POST /api/storage/upload-chunks/upload` + - Request: `multipart/form-data` + - Response: `Result` +- `POST /api/storage/upload-chunks/complete` + - Request: JSON (`ChunkUploadCompleteRequest`) + - Response: `Result` +- `DELETE /api/storage/upload-chunks/{uploadId}` + - Response: `204 No Content` + +```mermaid +sequenceDiagram + participant C as Client + participant API as Chunk endpoints + participant Ch as ChunkUploadService + participant S as IStorage + loop For each chunk + C->>API: POST 
/upload-chunks/upload (multipart) + API->>Ch: AppendChunkAsync(...) + Ch-->>API: Result + API-->>C: Result + end + C->>API: POST /upload-chunks/complete (JSON) + API->>Ch: CompleteAsync(...) + Ch->>S: UploadAsync(mergedStream, UploadOptions) [CommitToStorage=true] + S-->>Ch: Result + Ch-->>API: Result + API-->>C: Result +``` + +## SignalR Hub API + +Default hub: + +- `Integraions/ManagedCode.Storage.Server/Hubs/StorageHub.cs` +- Default route mapping: `Integraions/ManagedCode.Storage.Server/Extensions/StorageEndpointRouteBuilderExtensions.cs` + - `/hubs/storage` + +Key hub methods (base implementation in `StorageHubBase`): + +- `Task BeginUploadStreamAsync(UploadStreamDescriptor descriptor)` +- `IAsyncEnumerable UploadStreamContentAsync(string transferId, IAsyncEnumerable stream, CancellationToken ct)` +- `IAsyncEnumerable DownloadStreamAsync(string blobName, CancellationToken ct)` +- `Task GetStatusAsync(string transferId)` +- `Task CancelTransferAsync(string transferId)` + +Progress events emitted to the caller: + +- `TransferProgress` +- `TransferCompleted` +- `TransferCanceled` +- `TransferFaulted` + +Event names are defined in `Integraions/ManagedCode.Storage.Server/Hubs/StorageHubBase.cs` (`StorageHubEvents`). 
+ +```mermaid +sequenceDiagram + participant C as Client + participant Hub as /hubs/storage + participant S as IStorage + C->>Hub: BeginUploadStreamAsync(descriptor) + Hub-->>C: transferId + C->>Hub: UploadStreamContentAsync(transferId, byteStream) + Hub->>S: UploadAsync(tempFileStream, UploadOptions) + S-->>Hub: Result + Hub-->>C: TransferStatus progress/completed events +``` diff --git a/docs/Development/credentials.md b/docs/Development/credentials.md new file mode 100644 index 0000000..da318f6 --- /dev/null +++ b/docs/Development/credentials.md @@ -0,0 +1,165 @@ +# Credentials & Auth (Cloud Drives + CloudKit) + +This document explains how to obtain credentials for the providers that require OAuth / platform-specific keys: + +- OneDrive (Microsoft Graph / Entra ID) +- Google Drive (Google APIs) +- Dropbox (Dropbox API) +- CloudKit (iCloud app data / CloudKit Web Services) + +> iCloud Drive does not expose a public server-side file API. `ManagedCode.Storage.CloudKit` targets CloudKit app data, not iCloud Drive. + +## Overview + +```mermaid +flowchart LR + App[Your app] --> Auth[OAuth / Platform credentials] + Auth --> SDK[Official SDK client] + SDK --> Storage[ManagedCode.Storage provider] +``` + +## OneDrive (Microsoft Graph / Entra ID) + +### What you need + +- Entra ID (Azure AD) **tenant id** +- App registration **client id** +- Either: + - **client secret** (server-to-server), or + - a delegated flow (interactive user auth) + +```mermaid +sequenceDiagram + participant Admin as Admin + participant Entra as Entra ID + participant App as Your app + participant Graph as Microsoft Graph + Admin->>Entra: Register app + permissions + App->>Entra: Get access token + App->>Graph: Call Graph API +``` + +### Typical steps (server-to-server) + +1. Create an Entra ID app registration. +2. Add Microsoft Graph **Application** permissions (example: `Files.ReadWrite.All` or `Sites.ReadWrite.All`) and grant admin consent. +3. Create a client secret and store it securely. 
+ +### Suggested configuration keys + +- `OneDrive:TenantId` +- `OneDrive:ClientId` +- `OneDrive:ClientSecret` + +## Google Drive (Google APIs) + +### What you need + +- Google Cloud project with **Google Drive API** enabled +- Either: + - **Service account** JSON (server apps) + access to the target folder/drive, or + - OAuth client id/secret (interactive user auth) + +```mermaid +sequenceDiagram + participant Dev as Developer + participant GCP as Google Cloud Console + participant App as Your app + participant Drive as Google Drive API + Dev->>GCP: Enable Drive API + create credentials + App->>Drive: Call Drive API via DriveService +``` + +### Typical steps (service account) + +1. Enable the Google Drive API. +2. Create a service account and download a JSON key. +3. Share the target folder/drive with the service account email so it can access files. + +### Suggested configuration keys + +- `GoogleDrive:ServiceAccountJsonPath` +- `GoogleDrive:RootFolderId` + +## Dropbox + +### What you need + +- Dropbox App Console app +- App key (and optionally app secret) +- Either: + - **access token** (quick/testing), or + - **refresh token** (recommended for production “offline access”) + +```mermaid +sequenceDiagram + participant Dev as Developer + participant Console as Dropbox App Console + participant User as User + participant App as Your app + participant Dropbox as Dropbox API + Dev->>Console: Create app + scopes + User->>App: OAuth authorize + App->>Dropbox: Exchange code for tokens + App->>Dropbox: Use access/refresh token for API calls +``` + +### Typical steps + +1. Create an app in the Dropbox App Console (Scoped access, Full Dropbox or App folder). +2. Enable required scopes (example: `files.content.read`, `files.content.write`, `files.metadata.read`, `files.metadata.write`). +3. 
Obtain a token: + - quick: generate an access token in the app console + - production: use OAuth code flow to obtain an access token + refresh token + +### Suggested configuration keys + +- `Dropbox:AccessToken` +- `Dropbox:RefreshToken` +- `Dropbox:AppKey` +- `Dropbox:AppSecret` +- `Dropbox:RootPath` (example: `/Apps/`) + +## CloudKit (iCloud app data) + +### What you need + +- CloudKit container id (example: `iCloud.com.company.app`) +- CloudKit schema (record type + fields) for files +- Auth: + - API token (`ckAPIToken`) **or** + - server-to-server key + private key (for supported scenarios) + +```mermaid +sequenceDiagram + participant Dev as Developer + participant Apple as Apple Developer / CloudKit Dashboard + participant App as Your app + participant CK as CloudKit Web Services + Dev->>Apple: Configure container + schema + token/key + App->>CK: Signed/API-token request + CK-->>App: Record + Asset URL +``` + +### Typical steps + +1. Configure the container in CloudKit Dashboard and note the container id. +2. Ensure the record type exists (default `MCStorageFile`) and add fields: + - `path` (String, queryable/indexed) + - `contentType` (String) + - `file` (Asset) +3. Create an API token for the container (or server-to-server key) and store it securely. + +### Web auth token note (ckWebAuthToken) + +If you use `ckWebAuthToken` (typically for user-scoped/private DB scenarios), CloudKit treats it as **single-use** and returns a rotated token on each response. The next request must use the new token. + +`ManagedCode.Storage.CloudKit` will update `CloudKitStorageOptions.WebAuthToken` when a rotated token is returned, and serializes CloudKit requests when `WebAuthToken` is set (to avoid concurrent requests using an invalidated token). 
+ +### Suggested configuration keys + +- `CloudKit:ContainerId` +- `CloudKit:ApiToken` (or `CloudKit:KeyId` + `CloudKit:PrivateKeyPem`) +- `CloudKit:Environment` (Development/Production) +- `CloudKit:Database` (Public/Private) +- `CloudKit:RootPath` diff --git a/docs/Development/seo-audit-github-pages.md b/docs/Development/seo-audit-github-pages.md new file mode 100644 index 0000000..047113b --- /dev/null +++ b/docs/Development/seo-audit-github-pages.md @@ -0,0 +1,94 @@ +# GitHub Pages SEO Audit (Docs Site) + +This document audits the SEO/readability essentials of the GitHub Pages docs site generated from `README.md` + `docs/**`. + +## How the site is built + +The docs site is produced by GitHub Actions by generating pages under `github-pages/` and then building with Jekyll. + +```mermaid +flowchart LR + README[README.md] --> Gen[Pages generator
.github/workflows/jekyll-gh-pages.yml] + Docs[docs/**] --> Gen + Gen --> SiteSrc[github-pages/**] + SiteSrc --> Jekyll[Jekyll build] + Jekyll --> Pages[GitHub Pages
https://managedcode.github.io/Storage/] +``` + +## Site-wide checks (layout/config) + +Source of truth: + +- Layout: `github-pages/_layouts/default.html` +- Config: `github-pages/_config.yml` +- Sitemap: `github-pages/sitemap.xml` +- Robots: `github-pages/robots.txt` + +### What’s covered + +- **Title tags**: `Home` uses `site.title - site.tagline`; all other pages use `page.title | site.title`. +- **Meta descriptions**: every page has a `description` (manual for top-level pages, extracted for doc pages). +- **Keywords**: every page has `keywords` (manual for top-level pages, mapped for doc pages; falls back to site keywords). +- **Canonical URLs**: always include `site.url` + `site.baseurl` + `page.url` (GitHub Pages-friendly). +- **Open Graph / Twitter**: includes `og:title/description/url/image` and `twitter:*` equivalents. +- **Structured data (JSON-LD)**: includes `WebSite` and `SoftwareSourceCode`. +- **Sitemap**: includes all HTML pages except `/404.html`. +- **Robots**: site-wide `index,follow`; `/404.html` uses `noindex,follow`. + +### High-impact fixes applied + +- **Broken internal links**: source docs use `.md` links (good for GitHub), but the site renders `.html`. The generator rewrites `.md -> .html` for site pages. +- **Duplicate/generic meta descriptions**: doc pages now derive a description from the first meaningful paragraph (normalized, trimmed, max ~160 chars). +- **Social previews**: added an OG/Twitter image (`github-pages/assets/images/og-image.png`). +- **404 indexing**: `/404.html` is excluded from sitemap and marked `noindex`. + +## Page-by-page audit + +Notes: + +- URLs below are **relative to** `https://managedcode.github.io/Storage`. 
+- For doc pages (Features/API/ADR), `description` is generated from doc content by the workflow: + - skips headings, code fences, bullet lists + - skips ADR status lines (`Accepted/Proposed/...`) + - normalizes whitespace, trims trailing `:`, truncates to ~160 chars + +| URL | Title | Meta description | H1 | Notes | +| --- | --- | --- | --- | --- | +| `/` | Home | ManagedCode.Storage documentation: cross-provider storage toolkit for .NET and ASP.NET streaming scenarios. | ManagedCode.Storage | Title uses `site.title - site.tagline`; content from `README.md`. | +| `/setup.html` | Setup | How to clone, build, and run tests for ManagedCode.Storage. | Development Setup | Generated from `docs/Development/setup.md`. | +| `/credentials.html` | Credentials | How to obtain credentials for OneDrive, Google Drive, Dropbox, and CloudKit. | Credentials & Auth (Cloud Drives + CloudKit) | Generated from `docs/Development/credentials.md`. | +| `/testing.html` | Testing | Test strategy and how to run the ManagedCode.Storage test suite. | Testing Strategy | Generated from `docs/Testing/strategy.md`. | +| `/features/index.html` | Features | Documentation for major modules and providers in ManagedCode.Storage. | Features | Internal links rewritten (`.md` -> `.html`). 
| +| `/features/chunked-uploads.html` | Feature: Chunked Uploads (HTTP + Client) | Support reliable, resumable uploads of large files over unreliable connections by splitting a payload into chunks and completing with an integrity check (CRC32) | Feature: Chunked Uploads (HTTP + Client) | | +| `/features/dependency-injection.html` | Feature: Dependency Injection & Keyed Registrations | Make storage wiring predictable and scalable for .NET apps by supporting both a single default `IStorage` and multiple keyed storage registrations (multi-tenan… | Feature: Dependency Injection & Keyed Registrations | | +| `/features/integration-aspnet-server.html` | Feature: ASP.NET Server (`ManagedCode.Storage.Server`) | Expose storage operations over HTTP and SignalR on top of `IStorage` so ASP.NET apps can add streaming upload/download endpoints, chunked uploads, and live pro… | Feature: ASP.NET Server (`ManagedCode.Storage.Server`) | | +| `/features/integration-dotnet-client.html` | Feature: .NET HTTP Client (`ManagedCode.Storage.Client`) | Typed .NET HTTP client for `ManagedCode.Storage.Server` endpoints: multipart uploads, downloads to `LocalFile`, and chunked uploads with progress + CRC32. | Feature: .NET HTTP Client (`ManagedCode.Storage.Client`) | | +| `/features/integration-signalr-client.html` | Feature: .NET SignalR Client (`ManagedCode.Storage.Client.SignalR`) | Typed .NET SignalR client (`StorageSignalRClient`) for `StorageHub`: streaming upload/download helpers plus progress reporting and reconnection support. 
| Feature: .NET SignalR Client (`ManagedCode.Storage.Client.SignalR`) | | +| `/features/mime-and-crc.html` | Feature: MIME & Integrity Helpers (MimeHelper + CRC32) | Provide consistent content-type and integrity behaviour across providers and integrations | Feature: MIME & Integrity Helpers (MimeHelper + CRC32) | | +| `/features/provider-aws-s3.html` | Feature: Amazon S3 Provider (`ManagedCode.Storage.Aws`) | Implement `IStorage` on top of **Amazon S3**, including streaming and container (bucket) management semantics where applicable. | Feature: Amazon S3 Provider (`ManagedCode.Storage.Aws`) | | +| `/features/provider-azure-blob.html` | Feature: Azure Blob Storage Provider (`ManagedCode.Storage.Azure`) | Implement `IStorage` on top of **Azure Blob Storage** using the Azure SDK, including streaming and metadata operations. | Feature: Azure Blob Storage Provider (`ManagedCode.Storage.Azure`) | | +| `/features/provider-azure-datalake.html` | Feature: Azure Data Lake Gen2 Provider (`ManagedCode.Storage.Azure.DataLake`) | Implement `IStorage` on top of **Azure Data Lake Storage Gen2**. | Feature: Azure Data Lake Gen2 Provider (`ManagedCode.Storage.Azure.DataLake`) | | +| `/features/provider-cloudkit.html` | Feature: CloudKit Provider (`ManagedCode.Storage.CloudKit`) | Expose **CloudKit Web Services** (iCloud app data) as `IStorage` so applications can store small/medium blobs in a CloudKit container. 
| Feature: CloudKit Provider (`ManagedCode.Storage.CloudKit`) | | +| `/features/provider-dropbox.html` | Feature: Dropbox Provider (`ManagedCode.Storage.Dropbox`) | Expose **Dropbox** as `IStorage` so .NET apps can use the Dropbox API via a consistent upload/download/list/delete abstraction | Feature: Dropbox Provider (`ManagedCode.Storage.Dropbox`) | | +| `/features/provider-filesystem.html` | Feature: File System Provider (`ManagedCode.Storage.FileSystem`) | Implement `IStorage` on top of the local file system so you can use the same abstraction in production code, local development, and tests | Feature: File System Provider (`ManagedCode.Storage.FileSystem`) | | +| `/features/provider-google-cloud-storage.html` | Feature: Google Cloud Storage Provider (`ManagedCode.Storage.Gcp`) | Implement `IStorage` on top of **Google Cloud Storage (GCS)** using `Google.Cloud.Storage.V1`. | Feature: Google Cloud Storage Provider (`ManagedCode.Storage.Gcp`) | | +| `/features/provider-googledrive.html` | Feature: Google Drive Provider (`ManagedCode.Storage.GoogleDrive`) | Expose **Google Drive** as `IStorage` so .NET apps can store files in Google Drive via the official Drive API while keeping OAuth/auth concerns in the hosting … | Feature: Google Drive Provider (`ManagedCode.Storage.GoogleDrive`) | | +| `/features/provider-onedrive.html` | Feature: OneDrive Provider (`ManagedCode.Storage.OneDrive`) | Expose **OneDrive / Microsoft Graph** as `IStorage` so .NET apps can store files in a drive/folder via Graph using the same provider-agnostic upload/download/l… | Feature: OneDrive Provider (`ManagedCode.Storage.OneDrive`) | | +| `/features/provider-sftp.html` | Feature: SFTP Provider (`ManagedCode.Storage.Sftp`) | Implement `IStorage` on top of SFTP using SSH (for legacy systems and air-gapped environments). 
| Feature: SFTP Provider (`ManagedCode.Storage.Sftp`) | | +| `/features/storage-core.html` | Feature: Storage Core Abstraction (`ManagedCode.Storage.Core`) | Provide a single, provider-agnostic storage API for .NET so application code can upload/download/list/stream files without being coupled to vendor SDKs. | Feature: Storage Core Abstraction (`ManagedCode.Storage.Core`) | | +| `/features/testfakes.html` | Feature: Test Fakes (`ManagedCode.Storage.TestFakes`) | Provide lightweight storage doubles for tests and demos, allowing consumers to replace real provider registrations without provisioning cloud resources. | Feature: Test Fakes (`ManagedCode.Storage.TestFakes`) | | +| `/features/virtual-file-system.html` | Feature: Virtual File System (`ManagedCode.Storage.VirtualFileSystem`) | Expose a higher-level “virtual file system” API on top of `IStorage` | Feature: Virtual File System (`ManagedCode.Storage.VirtualFileSystem`) | | +| `/adr/index.html` | ADR | Architecture Decision Records (ADR) for ManagedCode.Storage. | Architecture Decisions (ADR) | Internal links rewritten (`.md` -> `.html`). | +| `/adr/0001-icloud-drive-support.html` | ADR 0001: iCloud Drive Support vs CloudKit (Server-side) | This repository provides a provider-agnostic `IStorage` abstraction for server-side and cross-platform .NET apps. | ADR 0001: iCloud Drive Support vs CloudKit (Server-side) | | +| `/api/index.html` | API | HTTP and SignalR API documentation for ManagedCode.Storage.Server. | API | Internal links rewritten (`.md` -> `.html`). | +| `/api/storage-server.html` | API: Storage Server (HTTP + SignalR) | This document describes the integration surface exposed by `Integraions/ManagedCode.Storage.Server`. | API: Storage Server (HTTP + SignalR) | | +| `/templates.html` | Templates | Documentation templates used in this repository (Feature and ADR templates). | Templates | Lists `docs/templates/*.md` via links to GitHub. | +| `/404.html` | 404 | This page doesn't exist. 
Check the documentation navigation or go back home. | 404 | Noindex; excluded from sitemap. | + +## Follow-up (optional) + +If you want to push SEO further: + +- Add per-page **breadcrumbs** JSON-LD for Features/API/ADR. +- Add a lightweight **search** (client-side) and emit `SearchAction` JSON-LD for the docs site. +- Add a proper 1200×630 OG image optimized for social previews (current image is the repo `logo.png` copied into the site). diff --git a/docs/Development/setup.md b/docs/Development/setup.md new file mode 100644 index 0000000..f343f9f --- /dev/null +++ b/docs/Development/setup.md @@ -0,0 +1,46 @@ +# Development Setup + +## Prerequisites + +- .NET SDK: **.NET 10** (`10.0.x`) +- Docker: required for Testcontainers-backed integration tests (Azurite / LocalStack / FakeGcsServer / SFTP) + +## Workflow (Local) + +```mermaid +flowchart LR + A[Clone repo] --> B[dotnet restore] + B --> C[dotnet build] + C --> D[dotnet test] + D --> E[dotnet format] + D --> F[Docker daemon] +``` + +## Clone + +```bash +git clone https://github.com/managedcode/Storage.git +cd Storage +``` + +## Restore / Build / Test + +Canonical commands (see `AGENTS.md`): + +```bash +dotnet restore ManagedCode.Storage.slnx +dotnet build ManagedCode.Storage.slnx --configuration Release +dotnet test Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj --configuration Release +``` + +## Formatting + +```bash +dotnet format ManagedCode.Storage.slnx +``` + +## Notes + +- Start Docker Desktop (or your Docker daemon) before running the full test suite. +- Never commit secrets (cloud keys, OAuth tokens, connection strings). Use environment variables or user secrets. +- Credentials for cloud-drive providers are documented in `docs/Development/credentials.md`. 
diff --git a/docs/Features/chunked-uploads.md b/docs/Features/chunked-uploads.md new file mode 100644 index 0000000..2508328 --- /dev/null +++ b/docs/Features/chunked-uploads.md @@ -0,0 +1,66 @@ +# Feature: Chunked Uploads (HTTP + Client) + +## Purpose + +Support reliable, resumable uploads of large files over unreliable connections by splitting a payload into chunks and completing with an integrity check (CRC32): + +- server stages chunks to disk via `ChunkUploadService` +- server merges chunks and optionally commits to `IStorage` +- client computes CRC32 during upload and reports progress + +## Main Flows + +```mermaid +sequenceDiagram + participant C as Client (ManagedCode.Storage.Client) + participant API as StorageControllerBase + participant Ch as ChunkUploadService + participant S as IStorage + + loop For each chunk + C->>API: POST /upload-chunks/upload (multipart) + API->>Ch: AppendChunkAsync(...) + Ch-->>API: Result + API-->>C: Result + end + + C->>API: POST /upload-chunks/complete (JSON) + API->>Ch: CompleteAsync(...) 
+ Ch->>S: UploadAsync(mergedStream, UploadOptions) [CommitToStorage=true] + S-->>Ch: Result + Ch-->>API: Result (includes checksum) + API-->>C: Result +``` + +## Components + +Server-side: + +- `Integraions/ManagedCode.Storage.Server/ChunkUpload/ChunkUploadService.cs` +- `Integraions/ManagedCode.Storage.Server/ChunkUpload/ChunkUploadSession.cs` +- `Integraions/ManagedCode.Storage.Server/Models/FileUploadPayload.cs` +- `Integraions/ManagedCode.Storage.Server/Models/ChunkUploadCompleteRequest.cs` +- `Integraions/ManagedCode.Storage.Server/Models/ChunkUploadCompleteResponse.cs` +- Endpoints: + - `Integraions/ManagedCode.Storage.Server/Controllers/StorageControllerBase.cs` + +Client-side: + +- `Integraions/ManagedCode.Storage.Client/StorageClient.cs` (`UploadLargeFile(...)`) +- CRC helpers: + - `ManagedCode.Storage.Core/Helpers/Crc32Helper.cs` + +## Current Behavior + +- Chunks are staged under the server temp path (controlled by server options). +- Completion step merges chunks in order and can: + - keep merged file on disk (`KeepMergedFile`) + - commit merged file to `IStorage` (`CommitToStorage`) +- Client requires `StorageClient.ChunkSize` to be configured before uploading large files. 
+
+## Tests
+
+- `Tests/ManagedCode.Storage.Tests/Server/ChunkUploadServiceTests.cs`
+- `Tests/ManagedCode.Storage.Tests/Core/StorageClientChunkTests.cs`
+- Controller-level flows:
+  - `Tests/ManagedCode.Storage.Tests/AspNetTests/Abstracts/BaseUploadControllerTests.cs`
diff --git a/docs/Features/dependency-injection.md b/docs/Features/dependency-injection.md
new file mode 100644
index 0000000..284229e
--- /dev/null
+++ b/docs/Features/dependency-injection.md
@@ -0,0 +1,75 @@
+# Feature: Dependency Injection & Keyed Registrations
+
+## Purpose
+
+Make storage wiring predictable and scalable for .NET apps by supporting both a single default `IStorage` and multiple keyed storage registrations (multi-tenant, multi-region, mirroring):
+
+- register a single `IStorage` as the **default** storage for the app
+- register multiple storages side-by-side using **keyed DI** (multi-tenant, multi-region, mirroring, per-workload routing)
+- keep provider construction consistent via provider-specific `Add*Storage...(...)` extensions
+
+## Main Flows
+
+### Default registration
+
+```mermaid
+flowchart LR
+    App --> DI[ServiceCollection]
+    DI --> Default["IStorage (default)"]
+    Default --> Provider[Concrete provider storage]
+```
+
+### Keyed registration (multi-storage)

+```mermaid
+flowchart LR
+    App --> DI[ServiceCollection]
+    DI --> A["IStorage keyed: tenant-a"]
+    DI --> B["IStorage keyed: tenant-b"]
+    A --> ProviderA[Provider instance A]
+    B --> ProviderB[Provider instance B]
+```
+
+## Components
+
+Core factory abstractions:
+
+- `ManagedCode.Storage.Core/Providers/IStorageProvider.cs` — provider contract used by the factory
+- `ManagedCode.Storage.Core/Providers/IStorageFactory.cs` — creates storages from options
+- `ManagedCode.Storage.Core/Providers/StorageFactory.cs` — resolves provider by options type and instantiates storages
+- `ManagedCode.Storage.Core/Extensions/ServiceCollectionExtensions.cs` — `AddStorageFactory()`
+
+Provider registrations:
+
+- Each provider exposes 
`Add*Storage(...)` and `Add*StorageAsDefault(...)` (and keyed overloads).
+- Examples:
+  - `Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs`
+  - `Storages/ManagedCode.Storage.GoogleDrive/Extensions/ServiceCollectionExtensions.cs`
+  - `Storages/ManagedCode.Storage.OneDrive/Extensions/ServiceCollectionExtensions.cs`
+
+Keyed DI:
+
+- Uses built-in .NET keyed services:
+  - `AddKeyedSingleton<IStorage>(key, factory)`
+  - resolve via `GetRequiredKeyedService<IStorage>(key)` or `[FromKeyedServices("key")]`
+
+## Current Behavior
+
+- Default registrations bind `IStorage` to the provider-specific typed storage.
+- Keyed registrations allow multiple independent options sets and storage instances.
+- For cloud-drive providers, a “swap point” interface exists for testability:
+  - OneDrive: `IOneDriveClient`
+  - Google Drive: `IGoogleDriveClient`
+  - Dropbox: `IDropboxClientWrapper`
+
+## Tests
+
+- `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs`
+- `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageProviderTests.cs`
+- `Tests/ManagedCode.Storage.Tests/ExtensionsTests/StoragePrivderExtensionsTests.cs`
+
+## Definition of Done
+
+- README includes default + keyed DI examples.
+- Provider docs show minimal DI wiring.
+- Tests prove keyed/default registrations resolve the expected services.
diff --git a/docs/Features/index.md b/docs/Features/index.md
new file mode 100644
index 0000000..abf34fc
--- /dev/null
+++ b/docs/Features/index.md
@@ -0,0 +1,42 @@
+# Features
+
+This folder documents the major modules in the repository. 
+
+```mermaid
+flowchart LR
+    App[Application code] --> Core["IStorage (Core)"]
+    Core --> Providers[Providers]
+    Core --> VFS[Virtual File System]
+    App --> Server[ASP.NET Server]
+    App --> Clients[Client SDKs]
+    Server --> Core
+    Clients --> Server
+```
+
+## Core
+
+- [Storage core abstraction](storage-core.md)
+- [Dependency injection & keyed registrations](dependency-injection.md)
+- [Virtual File System (VFS)](virtual-file-system.md)
+- [MIME & integrity helpers (MimeHelper + CRC32)](mime-and-crc.md)
+- [Test fakes](testfakes.md)
+
+## Integrations
+
+- [ASP.NET server (controllers + SignalR)](integration-aspnet-server.md)
+- [.NET HTTP client](integration-dotnet-client.md)
+- [.NET SignalR client](integration-signalr-client.md)
+- [Chunked uploads (HTTP + client)](chunked-uploads.md)
+
+## Providers
+
+- [Azure Blob](provider-azure-blob.md)
+- [Azure Data Lake Gen2](provider-azure-datalake.md)
+- [Amazon S3](provider-aws-s3.md)
+- [Google Cloud Storage](provider-google-cloud-storage.md)
+- [File system](provider-filesystem.md)
+- [SFTP](provider-sftp.md)
+- [OneDrive (Microsoft Graph)](provider-onedrive.md)
+- [Google Drive](provider-googledrive.md)
+- [Dropbox](provider-dropbox.md)
+- [CloudKit (iCloud app data)](provider-cloudkit.md)
diff --git a/docs/Features/integration-aspnet-server.md b/docs/Features/integration-aspnet-server.md
new file mode 100644
index 0000000..d0c4876
--- /dev/null
+++ b/docs/Features/integration-aspnet-server.md
@@ -0,0 +1,82 @@
+# Feature: ASP.NET Server (`ManagedCode.Storage.Server`)
+
+## Purpose
+
+Expose storage operations over HTTP and SignalR on top of `IStorage` so ASP.NET apps can add streaming upload/download endpoints, chunked uploads, and live progress:
+
+- upload/download endpoints (including streaming)
+- chunked uploads with merge + integrity checks
+- SignalR hub for streaming uploads/downloads and progress
+
+The server package is designed as a base layer: consumers can inherit controllers/hubs and customize 
routing/auth without being locked into rigid defaults. + +## Main Flows + +### HTTP upload/download + +```mermaid +sequenceDiagram + participant C as Client + participant API as StorageController + participant S as IStorage + C->>API: POST upload (multipart/stream) + API->>S: UploadAsync(...) + S-->>API: Result + API-->>C: 200 + metadata +``` + +### Chunked upload + +- `ChunkUploadService` stores incoming chunks into a temporary session. +- A completion request merges chunks and optionally commits to `IStorage`. + +### SignalR streaming + +- `StorageHub` streams bytes over SignalR and bridges to `IStorage` stream operations. + +## Quickstart + +```bash +dotnet add package ManagedCode.Storage.Server +``` + +```csharp +using ManagedCode.Storage.Server.Extensions; +using ManagedCode.Storage.Server.Extensions.DependencyInjection; + +builder.Services.AddControllers(); +builder.Services.AddStorageServer(); +builder.Services.AddStorageSignalR(); // optional (SignalR streaming) + +var app = builder.Build(); +app.MapControllers(); // /api/storage/* +app.MapStorageHub(); // /hubs/storage +``` + +## Components + +- Controllers: + - `Integraions/ManagedCode.Storage.Server/Controllers/StorageControllerBase.cs` + - `Integraions/ManagedCode.Storage.Server/Controllers/StorageController.cs` +- SignalR: + - `Integraions/ManagedCode.Storage.Server/Hubs/StorageHubBase.cs` + - `Integraions/ManagedCode.Storage.Server/Hubs/StorageHub.cs` +- Chunked upload: + - `Integraions/ManagedCode.Storage.Server/ChunkUpload/ChunkUploadService.cs` + - `Integraions/ManagedCode.Storage.Server/ChunkUpload/ChunkUploadSession.cs` +- Endpoint wiring: + - `Integraions/ManagedCode.Storage.Server/Extensions/StorageEndpointRouteBuilderExtensions.cs` + +## Current Behavior + +- Controllers are “base-first”: `StorageControllerBase` is intended to be inherited so applications can override routing and auth. 
+- Chunk uploads are designed to be storage-provider agnostic (chunks are staged and merged via server-side logic). + +## Tests + +- `Tests/ManagedCode.Storage.Tests/Server/ChunkUploadServiceTests.cs` +- `Tests/ManagedCode.Storage.Tests/AspNetTests/*` (in-process test host for controllers + SignalR) + +## References + +- `docs/server-streaming-plan.md` (roadmap document) diff --git a/docs/Features/integration-dotnet-client.md b/docs/Features/integration-dotnet-client.md new file mode 100644 index 0000000..b44b214 --- /dev/null +++ b/docs/Features/integration-dotnet-client.md @@ -0,0 +1,59 @@ +# Feature: .NET HTTP Client (`ManagedCode.Storage.Client`) + +## Purpose + +Typed .NET HTTP client for `ManagedCode.Storage.Server` endpoints: multipart uploads, downloads to `LocalFile`, and chunked uploads with progress + CRC32. + +- multipart uploads +- downloads to `LocalFile` +- chunked uploads with progress + CRC32 + +## Main Flows + +### Chunked upload with CRC + +```mermaid +flowchart TD + A[Stream/File] --> B[StorageClient] + B --> C[Split into chunks] + C --> D[POST /chunks/upload] + D --> E[POST /chunks/complete] + E --> F[Result + CRC] +``` + +## Quickstart + +```bash +dotnet add package ManagedCode.Storage.Client +``` + +```csharp +using ManagedCode.Storage.Client; + +var http = new HttpClient { BaseAddress = new Uri("https://my-api.example") }; +var client = new StorageClient(http); +client.SetChunkSize(5 * 1024 * 1024); // 5 MB + +await using var stream = File.OpenRead("video.mp4"); +var result = await client.UploadLargeFile( + stream, + uploadApiUrl: "/api/storage/upload-chunks/upload", + completeApiUrl: "/api/storage/upload-chunks/complete", + onProgressChanged: percent => Console.WriteLine($"{percent:F1}%")); +``` + +## Components + +- `Integraions/ManagedCode.Storage.Client/IStorageClient.cs` +- `Integraions/ManagedCode.Storage.Client/StorageClient.cs` +- `Integraions/ManagedCode.Storage.Client/ProgressStatus.cs` + +## Current Behavior + +- 
`StorageClient.ChunkSize` must be set before `UploadLargeFile(...)`. +- CRC is computed during upload using `ManagedCode.Storage.Core.Helpers.Crc32Helper`. +- MIME type is resolved via `MimeHelper` based on file name. + +## Tests + +- `Tests/ManagedCode.Storage.Tests/Core/StorageClientChunkTests.cs` diff --git a/docs/Features/integration-signalr-client.md b/docs/Features/integration-signalr-client.md new file mode 100644 index 0000000..bfba917 --- /dev/null +++ b/docs/Features/integration-signalr-client.md @@ -0,0 +1,40 @@ +# Feature: .NET SignalR Client (`ManagedCode.Storage.Client.SignalR`) + +## Purpose + +Typed .NET SignalR client (`StorageSignalRClient`) for `StorageHub`: streaming upload/download helpers plus progress reporting and reconnection support. + +- streaming upload/download helpers +- progress reporting and reconnection support via `StorageSignalRClientOptions` + +## Main Flows + +```mermaid +sequenceDiagram + participant App as App + participant Client as StorageSignalRClient + participant Hub as StorageHub + participant S as IStorage + App->>Client: UploadAsync(stream, descriptor) + Client->>Hub: SignalR stream upload + Hub->>S: UploadAsync(...) + S-->>Hub: Result + Hub-->>Client: status/progress +``` + +## Components + +- `Integraions/ManagedCode.Storage.Client.SignalR/StorageSignalRClient.cs` +- `Integraions/ManagedCode.Storage.Client.SignalR/StorageSignalRClientOptions.cs` +- `Integraions/ManagedCode.Storage.Client.SignalR/StorageSignalREventNames.cs` +- `Integraions/ManagedCode.Storage.Client.SignalR/Models/*` + +## Current Behavior + +- The client is transport-agnostic as long as it can connect to the server hub URL. +- Progress updates are exposed via strongly-typed status models. 
+ +## Tests + +- `Tests/ManagedCode.Storage.Tests/AspNetTests/Abstracts/BaseSignalRStorageTests.cs` +- `Tests/ManagedCode.Storage.Tests/AspNetTests/Azure/AzureSignalRStorageTests.cs` diff --git a/docs/Features/mime-and-crc.md b/docs/Features/mime-and-crc.md new file mode 100644 index 0000000..0a9e497 --- /dev/null +++ b/docs/Features/mime-and-crc.md @@ -0,0 +1,36 @@ +# Feature: MIME & Integrity Helpers (MimeHelper + CRC32) + +## Purpose + +Provide consistent content-type and integrity behaviour across providers and integrations: + +- MIME type resolution via `MimeHelper` +- streamed CRC32 calculation via `Crc32Helper` for large-file validation and mirroring scenarios + +## Main Flows + +```mermaid +flowchart LR + FileName[File name] --> Mime[MimeHelper.GetMimeType] + Stream[Stream] --> CRC[Crc32Helper] + CRC --> Validation[Compare checksums] +``` + +## Components + +- MIME: + - `ManagedCode.MimeTypes` (`MimeHelper`) +- CRC: + - `ManagedCode.Storage.Core/Helpers/Crc32Helper.cs` + - `ManagedCode.Storage.Core/Helpers/PathHelper.cs` (shared path utilities used by storages) + +## Current Behavior + +- MIME type is typically derived from the file name and stored as `BlobMetadata.MimeType` where providers support it. +- CRC32 can be computed without loading full content into memory (streamed processing). +- All MIME lookups should go through `MimeHelper` (avoid provider-specific or ad-hoc MIME detection). + +## Tests + +- `Tests/ManagedCode.Storage.Tests/Core/Crc32HelperTests.cs` + diff --git a/docs/Features/provider-aws-s3.md b/docs/Features/provider-aws-s3.md new file mode 100644 index 0000000..a6f2a01 --- /dev/null +++ b/docs/Features/provider-aws-s3.md @@ -0,0 +1,57 @@ +# Feature: Amazon S3 Provider (`ManagedCode.Storage.Aws`) + +## Purpose + +Implement `IStorage` on top of **Amazon S3**, including streaming and container (bucket) management semantics where applicable. 
+ +## Main Flows + +```mermaid +flowchart LR + App --> S3[AWSStorage : IAWSStorage] + S3 --> SDK[Amazon.S3 IAmazonS3] + SDK --> S3Cloud[(Amazon S3)] +``` + +## Components + +- `Storages/ManagedCode.Storage.Aws/AWSStorage.cs` +- `Storages/ManagedCode.Storage.Aws/AWSStorageProvider.cs` +- `Storages/ManagedCode.Storage.Aws/BlobStream.cs` +- DI: + - `Storages/ManagedCode.Storage.Aws/Extensions/ServiceCollectionExtensions.cs` + - `Storages/ManagedCode.Storage.Aws/Extensions/StorageFactoryExtensions.cs` +- Options: + - `Storages/ManagedCode.Storage.Aws/Options/AWSStorageOptions.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.Aws +``` + +```csharp +using Amazon.S3; +using ManagedCode.Storage.Aws.Extensions; + +builder.Services.AddAWSStorageAsDefault(options => +{ + options.Bucket = "my-bucket"; + options.PublicKey = configuration["Aws:AccessKeyId"]; + options.SecretKey = configuration["Aws:SecretAccessKey"]; + options.OriginalOptions = new AmazonS3Config { RegionEndpoint = Amazon.RegionEndpoint.USEast1 }; +}); +``` + +## Current Behavior + +- Supports multiple auth modes (access keys / role / instance profile) via `AWSStorageOptions`. +- Can create the bucket when `CreateContainerIfNotExists = true`. 
+ +## Tests + +- `Tests/ManagedCode.Storage.Tests/Storages/AWS/AWSUploadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/AWS/AWSDownloadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/AWS/AWSBlobTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/AWS/AWSContainerTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/AWS/AwsConfigTests.cs` diff --git a/docs/Features/provider-azure-blob.md b/docs/Features/provider-azure-blob.md new file mode 100644 index 0000000..523689e --- /dev/null +++ b/docs/Features/provider-azure-blob.md @@ -0,0 +1,62 @@ +# Feature: Azure Blob Storage Provider (`ManagedCode.Storage.Azure`) + +## Purpose + +Implement `IStorage` on top of **Azure Blob Storage** using the Azure SDK, including streaming and metadata operations. + +## Main Flows + +```mermaid +flowchart LR + App --> AzureStorage[AzureStorage : IAzureStorage] + AzureStorage --> BlobClient[Azure SDK BlobContainerClient/BlobClient] + BlobClient --> Azure[(Azure Blob Storage)] +``` + +## Components + +- Core types: + - `Storages/ManagedCode.Storage.Azure/AzureStorage.cs` + - `Storages/ManagedCode.Storage.Azure/AzureStorageProvider.cs` + - `Storages/ManagedCode.Storage.Azure/BlobStream.cs` (stream helpers) +- DI: + - `Storages/ManagedCode.Storage.Azure/Extensions/ServiceCollectionExtensions.cs` + - `Storages/ManagedCode.Storage.Azure/Extensions/StorageFactoryExtensions.cs` +- Options: + - `Storages/ManagedCode.Storage.Azure/Options/AzureStorageOptions.cs` (connection string) + - `Storages/ManagedCode.Storage.Azure/Options/AzureStorageCredentialsOptions.cs` (token credential) + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.Azure +``` + +```csharp +using ManagedCode.Storage.Azure.Extensions; + +builder.Services.AddAzureStorageAsDefault(options => +{ + options.Container = "my-container"; + options.ConnectionString = configuration["Azure:ConnectionString"]; +}); +``` + +## Current Behavior + +- Supports container creation when `CreateContainerIfNotExists = 
true`. +- Uses Azure SDK transfer options when configured (`UploadTransferOptions`). + +## Tests + +- `Tests/ManagedCode.Storage.Tests/Storages/Azure/AzureUploadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Azure/AzureDownloadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Azure/AzureBlobTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Azure/AzureBlobStreamTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Azure/AzureContainerTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Azure/AzureConfigTests.cs` + +## References + +- `README.md` (package list + general usage) +- Azure SDK docs (Blob Storage) diff --git a/docs/Features/provider-azure-datalake.md b/docs/Features/provider-azure-datalake.md new file mode 100644 index 0000000..8b78cf9 --- /dev/null +++ b/docs/Features/provider-azure-datalake.md @@ -0,0 +1,51 @@ +# Feature: Azure Data Lake Gen2 Provider (`ManagedCode.Storage.Azure.DataLake`) + +## Purpose + +Implement `IStorage` on top of **Azure Data Lake Storage Gen2**. 
+ +## Main Flows + +```mermaid +flowchart LR + App --> ADL[AzureDataLakeStorage : IAzureDataLakeStorage] + ADL --> SDK[Azure.Storage.Files.DataLake] + SDK --> ADLS[(ADLS Gen2)] +``` + +## Components + +- `Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorage.cs` +- `Storages/ManagedCode.Storage.Azure.DataLake/AzureDataLakeStorageProvider.cs` +- DI: + - `Storages/ManagedCode.Storage.Azure.DataLake/Extensions/ServiceCollectionExtensions.cs` + - `Storages/ManagedCode.Storage.Azure.DataLake/Extensions/StorageFactoryExtensions.cs` +- Options: + - `Storages/ManagedCode.Storage.Azure.DataLake/Options/AzureDataLakeStorageOptions.cs` + - `Storages/ManagedCode.Storage.Azure.DataLake/Options/OpenReadStreamOptions.cs` + - `Storages/ManagedCode.Storage.Azure.DataLake/Options/OpenWriteStreamOptions.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.Azure.DataLake +``` + +```csharp +using ManagedCode.Storage.Azure.DataLake.Extensions; + +builder.Services.AddAzureDataLakeStorageAsDefault(options => +{ + options.FileSystem = "my-filesystem"; + options.ConnectionString = configuration["AzureDataLake:ConnectionString"]!; +}); +``` + +## Current Behavior + +- Uses `ConnectionString` + `FileSystem` as the “container” equivalent. +- Creates the filesystem automatically when `CreateContainerIfNotExists = true`. + +## Tests + +- `Tests/ManagedCode.Storage.Tests/Storages/Azure/AzureDataLakeTests.cs` diff --git a/docs/Features/provider-cloudkit.md b/docs/Features/provider-cloudkit.md new file mode 100644 index 0000000..cfbc72c --- /dev/null +++ b/docs/Features/provider-cloudkit.md @@ -0,0 +1,75 @@ +# Feature: CloudKit Provider (`ManagedCode.Storage.CloudKit`) + +## Purpose + +Expose **CloudKit Web Services** (iCloud app data) as `IStorage` so applications can store small/medium blobs in a CloudKit container. + +> Note: iCloud Drive does not provide an official server-side file API. This provider targets CloudKit app data, not iCloud Drive. 
+ +## Main Flows + +```mermaid +flowchart LR + App --> CK[CloudKitStorage : ICloudKitStorage] + CK --> Client[ICloudKitClient] + Client --> Web[(CloudKit Web Services)] +``` + +## Components + +- Storage: + - `Storages/ManagedCode.Storage.CloudKit/CloudKitStorage.cs` + - `Storages/ManagedCode.Storage.CloudKit/CloudKitStorageProvider.cs` +- Client: + - `Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitClient.cs` + - `Storages/ManagedCode.Storage.CloudKit/Clients/ICloudKitClient.cs` +- Options / DI: + - `Storages/ManagedCode.Storage.CloudKit/Options/CloudKitStorageOptions.cs` + - `Storages/ManagedCode.Storage.CloudKit/Extensions/ServiceCollectionExtensions.cs` +- Supporting models: + - `Storages/ManagedCode.Storage.CloudKit/Clients/CloudKitRecord.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.CloudKit +``` + +```csharp +using ManagedCode.Storage.CloudKit.Extensions; +using ManagedCode.Storage.CloudKit.Options; + +builder.Services.AddCloudKitStorageAsDefault(options => +{ + options.ContainerId = "iCloud.com.company.app"; + options.Environment = CloudKitEnvironment.Production; + options.Database = CloudKitDatabase.Public; + options.RootPath = "app-data"; + options.ApiToken = configuration["CloudKit:ApiToken"]; +}); +``` + +## Current Behavior + +- Supports multiple auth modes: + - API token (`ckAPIToken`) via `CloudKitStorageOptions.ApiToken` + - optional web auth token (`ckWebAuthToken`) for user-scoped scenarios (rotated by CloudKit on each request) + - server-to-server signed requests for supported scenarios via `ServerToServerKeyId` + `ServerToServerPrivateKeyPem` +- Record type/field names are configurable to match CloudKit schema: + - `RecordType` (default `MCStorageFile`) + - `PathFieldName`, `AssetFieldName`, `ContentTypeFieldName` + +## Tests + +- HTTP/SDK-level fake: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs` + - `Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitClientHttpTests.cs` 
+- Storage behaviour via fake client: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageTests.cs` +- DI + provider plumbing: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitDependencyInjectionTests.cs` + - `Tests/ManagedCode.Storage.Tests/Storages/CloudKit/CloudKitStorageProviderTests.cs` + +## Configuration Notes + +See `docs/Development/credentials.md` and `README.md` for step-by-step CloudKit setup (container id, schema, and tokens/keys). diff --git a/docs/Features/provider-dropbox.md b/docs/Features/provider-dropbox.md new file mode 100644 index 0000000..fd7b7ab --- /dev/null +++ b/docs/Features/provider-dropbox.md @@ -0,0 +1,88 @@ +# Feature: Dropbox Provider (`ManagedCode.Storage.Dropbox`) + +## Purpose + +Expose **Dropbox** as `IStorage` so .NET apps can use the Dropbox API via a consistent upload/download/list/delete abstraction: + +- upload/download/list/delete through a consistent API +- support “library-managed client creation” from credentials for the common cases +- keep a clean swap point (`IDropboxClientWrapper`) for tests and advanced scenarios + +## Main Flows + +```mermaid +flowchart LR + App --> DB[DropboxStorage : IDropboxStorage] + DB --> Client[IDropboxClientWrapper] + Client --> DropboxAPI[Dropbox API] +``` + +## Components + +- Storage: + - `Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs` + - `Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs` +- Client wrapper: + - `Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs` + - `Storages/ManagedCode.Storage.Dropbox/Clients/IDropboxClientWrapper.cs` +- Options / DI: + - `Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs` + - `Storages/ManagedCode.Storage.Dropbox/Extensions/ServiceCollectionExtensions.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.Dropbox +``` + +Access token (simple): + +```csharp +using ManagedCode.Storage.Dropbox.Extensions; + 
+builder.Services.AddDropboxStorageAsDefault(options => +{ + options.AccessToken = configuration["Dropbox:AccessToken"]; + options.RootPath = "/Apps/my-app"; +}); +``` + +Refresh token (recommended for production/offline access): + +```csharp +using ManagedCode.Storage.Dropbox.Extensions; + +builder.Services.AddDropboxStorageAsDefault(options => +{ + options.RefreshToken = configuration["Dropbox:RefreshToken"]; + options.AppKey = configuration["Dropbox:AppKey"]; + options.AppSecret = configuration["Dropbox:AppSecret"]; // optional in PKCE flows + options.RootPath = "/Apps/my-app"; +}); +``` + +## Current Behavior + +Supported configuration modes: + +- Provide a swap point (tests / custom behaviour): + - `DropboxStorageOptions.Client` (custom `IDropboxClientWrapper`) +- Provide credentials and let the provider build the SDK client: + - `AccessToken` (+ optional `DropboxClientConfig`) + - OR `RefreshToken` + `AppKey` (+ optional `AppSecret`, `DropboxClientConfig`) + +`RootPath` scopes all operations (for App Folder apps use a path under `/Apps/`, e.g. `/Apps/my-app`). + +## Tests + +- HTTP/SDK-level fake: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs` +- Storage behaviour via fake client: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs` +- DI + provider plumbing: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs` + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageProviderTests.cs` + +## Configuration Notes + +See `docs/Development/credentials.md` and `README.md` for step-by-step instructions on creating an app, enabling scopes, and obtaining tokens. 
diff --git a/docs/Features/provider-filesystem.md b/docs/Features/provider-filesystem.md new file mode 100644 index 0000000..b76a872 --- /dev/null +++ b/docs/Features/provider-filesystem.md @@ -0,0 +1,56 @@ +# Feature: File System Provider (`ManagedCode.Storage.FileSystem`) + +## Purpose + +Implement `IStorage` on top of the local file system so you can use the same abstraction in production code, local development, and tests: + +- local development +- on-prem/hybrid deployments +- tests and demos + +## Main Flows + +```mermaid +flowchart LR + App --> FS[FileSystemStorage : IFileSystemStorage] + FS --> IO[System.IO] + IO --> Disk[(Disk)] +``` + +## Components + +- `Storages/ManagedCode.Storage.FileSystem/FileSystemStorage.cs` +- `Storages/ManagedCode.Storage.FileSystem/FileSystemStorageProvider.cs` +- DI: + - `Storages/ManagedCode.Storage.FileSystem/Extensions/ServiceCollectionExtensions.cs` + - `Storages/ManagedCode.Storage.FileSystem/Extensions/StorageFactoryExtensions.cs` +- Options: + - `Storages/ManagedCode.Storage.FileSystem/Options/FileSystemStorageOptions.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.FileSystem +``` + +```csharp +using ManagedCode.Storage.FileSystem.Extensions; + +builder.Services.AddFileSystemStorageAsDefault(options => +{ + options.BaseFolder = Path.Combine(builder.Environment.ContentRootPath, "storage"); +}); +``` + +## Current Behavior + +- `BaseFolder` acts as the container root. +- Supports directory creation when `CreateContainerIfNotExists = true`. 
+ +## Tests + +- `Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemDownloadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemBlobTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemContainerTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemSecurityTests.cs` diff --git a/docs/Features/provider-google-cloud-storage.md b/docs/Features/provider-google-cloud-storage.md new file mode 100644 index 0000000..d48d119 --- /dev/null +++ b/docs/Features/provider-google-cloud-storage.md @@ -0,0 +1,60 @@ +# Feature: Google Cloud Storage Provider (`ManagedCode.Storage.Gcp`) + +## Purpose + +Implement `IStorage` on top of **Google Cloud Storage (GCS)** using `Google.Cloud.Storage.V1`. + +## Main Flows + +```mermaid +flowchart LR + App --> GCS[GCPStorage : IGCPStorage] + GCS --> SDK[Google.Cloud.Storage.V1 StorageClient] + SDK --> Cloud[(GCS)] +``` + +## Components + +- `Storages/ManagedCode.Storage.Google/GCPStorage.cs` +- `Storages/ManagedCode.Storage.Google/GCPStorageProvider.cs` +- DI: + - `Storages/ManagedCode.Storage.Google/Extensions/ServiceCollectionExtensions.cs` + - `Storages/ManagedCode.Storage.Google/Extensions/StorageFactoryExtensions.cs` +- Options: + - `Storages/ManagedCode.Storage.Google/Options/GCPStorageOptions.cs` + - `Storages/ManagedCode.Storage.Google/Options/BucketOptions.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.Gcp +``` + +```csharp +using Google.Apis.Auth.OAuth2; +using ManagedCode.Storage.Google.Extensions; +using ManagedCode.Storage.Google.Options; + +builder.Services.AddGCPStorageAsDefault(options => +{ + options.GoogleCredential = GoogleCredential.FromFile("service-account.json"); + options.BucketOptions = new BucketOptions + { + ProjectId = "my-project-id", + Bucket = "my-bucket" + }; +}); +``` + +## Current Behavior + +- Supports file-based auth 
(`AuthFileName`) and direct `GoogleCredential`. +- Bucket creation can be enabled via `CreateContainerIfNotExists`. + +## Tests + +- `Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSUploadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSDownloadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSBlobTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSContainerTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/GCS/GCSConfigTests.cs` diff --git a/docs/Features/provider-googledrive.md b/docs/Features/provider-googledrive.md new file mode 100644 index 0000000..06723b2 --- /dev/null +++ b/docs/Features/provider-googledrive.md @@ -0,0 +1,80 @@ +# Feature: Google Drive Provider (`ManagedCode.Storage.GoogleDrive`) + +## Purpose + +Expose **Google Drive** as `IStorage` so .NET apps can store files in Google Drive via the official Drive API while keeping OAuth/auth concerns in the hosting app: + +- upload/download/list/delete through a consistent API +- keep auth concerns in the hosting app (you provide a `DriveService`), while still allowing a swap point for tests + +## Main Flows + +```mermaid +flowchart LR + App --> GD[GoogleDriveStorage : IGoogleDriveStorage] + GD --> Client[IGoogleDriveClient] + Client --> Drive[Google Drive API] +``` + +## Components + +- Storage: + - `Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs` + - `Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorageProvider.cs` +- Client wrapper: + - `Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs` + - `Storages/ManagedCode.Storage.GoogleDrive/Clients/IGoogleDriveClient.cs` +- Options / DI: + - `Storages/ManagedCode.Storage.GoogleDrive/Options/GoogleDriveStorageOptions.cs` + - `Storages/ManagedCode.Storage.GoogleDrive/Extensions/ServiceCollectionExtensions.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.GoogleDrive +dotnet add package Google.Apis.Drive.v3 +``` + +```csharp +using Google.Apis.Auth.OAuth2; 
+using Google.Apis.Drive.v3; +using Google.Apis.Services; +using ManagedCode.Storage.GoogleDrive.Extensions; + +var credential = GoogleCredential + .FromFile("service-account.json") + .CreateScoped(DriveService.Scope.Drive); + +var driveService = new DriveService(new BaseClientService.Initializer +{ + HttpClientInitializer = credential, + ApplicationName = "MyApp" +}); + +builder.Services.AddGoogleDriveStorageAsDefault(options => +{ + options.DriveService = driveService; + options.RootFolderId = "root"; +}); +``` + +## Current Behavior + +- You can provide either: + - `GoogleDriveStorageOptions.Client` (custom `IGoogleDriveClient` swap point, mainly for tests), or + - `GoogleDriveStorageOptions.DriveService` (official SDK client) +- `RootFolderId` scopes all operations (defaults to `root`). + +## Tests + +- HTTP/SDK-level fake: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs` +- Storage behaviour via fake client: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs` +- DI + provider plumbing: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs` + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageProviderTests.cs` + +## Configuration Notes + +See `docs/Development/credentials.md` and `README.md` for step-by-step instructions on Drive API credentials (service account or OAuth client). 
diff --git a/docs/Features/provider-onedrive.md b/docs/Features/provider-onedrive.md new file mode 100644 index 0000000..71a3a58 --- /dev/null +++ b/docs/Features/provider-onedrive.md @@ -0,0 +1,77 @@ +# Feature: OneDrive Provider (`ManagedCode.Storage.OneDrive`) + +## Purpose + +Expose **OneDrive / Microsoft Graph** as `IStorage` so .NET apps can store files in a drive/folder via Graph using the same provider-agnostic upload/download/list/delete API as other backends: + +- upload/download/list/delete through a consistent API +- keep auth concerns in the hosting app (you provide a `GraphServiceClient`), while still allowing a swap point for tests + +## Main Flows + +```mermaid +flowchart LR + App --> OD[OneDriveStorage : IOneDriveStorage] + OD --> Client[IOneDriveClient] + Client --> Graph[Microsoft Graph] +``` + +## Components + +- Storage: + - `Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs` + - `Storages/ManagedCode.Storage.OneDrive/OneDriveStorageProvider.cs` +- Client wrapper: + - `Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs` + - `Storages/ManagedCode.Storage.OneDrive/Clients/IOneDriveClient.cs` +- Options / DI: + - `Storages/ManagedCode.Storage.OneDrive/Options/OneDriveStorageOptions.cs` + - `Storages/ManagedCode.Storage.OneDrive/Extensions/ServiceCollectionExtensions.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.OneDrive +dotnet add package Azure.Identity +``` + +```csharp +using Azure.Identity; +using ManagedCode.Storage.OneDrive.Extensions; +using Microsoft.Graph; + +var credential = new ClientSecretCredential( + tenantId: configuration["OneDrive:TenantId"]!, + clientId: configuration["OneDrive:ClientId"]!, + clientSecret: configuration["OneDrive:ClientSecret"]!); + +var graphClient = new GraphServiceClient(credential, new[] { "https://graph.microsoft.com/.default" }); + +builder.Services.AddOneDriveStorageAsDefault(options => +{ + options.GraphClient = graphClient; + options.DriveId = "me"; + 
options.RootPath = "app-data"; +}); +``` + +## Current Behavior + +- You can provide either: + - `OneDriveStorageOptions.Client` (custom `IOneDriveClient` swap point, mainly for tests), or + - `OneDriveStorageOptions.GraphClient` (official SDK client) +- Container deletion is not supported (drives/folders are account-managed). + +## Tests + +- HTTP/SDK-level fake: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs` +- Storage behaviour via fake client: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs` +- DI + provider plumbing: + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveDependencyInjectionTests.cs` + - `Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageProviderTests.cs` + +## Configuration Notes + +See `docs/Development/credentials.md` and `README.md` for step-by-step instructions on Entra ID app registration and Graph auth. diff --git a/docs/Features/provider-sftp.md b/docs/Features/provider-sftp.md new file mode 100644 index 0000000..9299ca0 --- /dev/null +++ b/docs/Features/provider-sftp.md @@ -0,0 +1,56 @@ +# Feature: SFTP Provider (`ManagedCode.Storage.Sftp`) + +## Purpose + +Implement `IStorage` on top of SFTP using SSH (for legacy systems and air-gapped environments). 
+ +## Main Flows + +```mermaid +flowchart LR + App --> S[SftpStorage : ISftpStorage] + S --> SSH[SSH.NET] + SSH --> Server[(SFTP Server)] +``` + +## Components + +- `Storages/ManagedCode.Storage.Sftp/SftpStorage.cs` +- `Storages/ManagedCode.Storage.Sftp/SftpStorageProvider.cs` +- DI: + - `Storages/ManagedCode.Storage.Sftp/Extensions/ServiceCollectionExtensions.cs` + - `Storages/ManagedCode.Storage.Sftp/Extensions/StorageFactoryExtensions.cs` +- Options: + - `Storages/ManagedCode.Storage.Sftp/Options/SftpStorageOptions.cs` + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.Sftp +``` + +```csharp +using ManagedCode.Storage.Sftp.Extensions; + +builder.Services.AddSftpStorageAsDefault(options => +{ + options.Host = "sftp.example.com"; + options.Username = configuration["Sftp:Username"]; + options.Password = configuration["Sftp:Password"]; + options.RemoteDirectory = "/uploads"; +}); +``` + +## Current Behavior + +- Supports password and key-based auth (`PrivateKeyPath` / `PrivateKeyContent`). +- `AcceptAnyHostKey` exists for dev/test only; production should use `HostKeyFingerprint`. +- Can create directories automatically (`CreateDirectoryIfNotExists`). + +## Tests + +- `Tests/ManagedCode.Storage.Tests/Storages/Sftp/SftpUploadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Sftp/SftpDownloadTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Sftp/SftpBlobTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Sftp/SftpStreamTests.cs` +- `Tests/ManagedCode.Storage.Tests/Storages/Sftp/SftpContainerTests.cs` diff --git a/docs/Features/storage-core.md b/docs/Features/storage-core.md new file mode 100644 index 0000000..92a1dae --- /dev/null +++ b/docs/Features/storage-core.md @@ -0,0 +1,67 @@ +# Feature: Storage Core Abstraction (`ManagedCode.Storage.Core`) + +## Purpose + +Provide a single, provider-agnostic storage API for .NET so application code can upload/download/list/stream files without being coupled to vendor SDKs. 
+ +The core package defines: + +- `IStorage` and related capability interfaces (`IUploader`, `IDownloader`, `IStreamer`) +- consistent `Result` / `Result<T>` error handling +- common option models (upload/download/delete/metadata/legal hold) +- provider factories (`IStorageProvider`, `IStorageFactory`) and DI helpers + +## Main Flows + +### 1) Upload / Download + +```mermaid +flowchart LR + App[Application code] --> I[IStorage] + I --> B[BaseStorage] + B --> C[Provider storage client] + C --> SDK[Vendor SDK / API] + SDK --> B + B --> I + I --> App +``` + +### 2) Options-driven pathing + metadata + +- Callers pass `UploadOptions` / `DownloadOptions` / `MetadataOptions` to control: + - `FileName`, `Directory` (logical path) + - `MimeType` (normalized through `MimeHelper`) + - per-provider metadata where supported + +## Components + +Key files: + +- `ManagedCode.Storage.Core/IStorage.cs` — main abstraction surface +- `ManagedCode.Storage.Core/BaseStorage.cs` — shared lifecycle + template methods for providers +- `ManagedCode.Storage.Core/Models/*.cs` — option models and `BlobMetadata` / `LocalFile` +- `ManagedCode.Storage.Core/Helpers/Crc32Helper.cs` — streamed CRC32 helpers used by chunked uploads/downloads +- `ManagedCode.Storage.Core/Helpers/PathHelper.cs` — shared path manipulation utilities +- `ManagedCode.Storage.Core/Providers/*` — provider factory abstractions and the default `StorageFactory` +- `ManagedCode.Storage.Core/Extensions/ServiceCollectionExtensions.cs` — DI helpers + +## Current Behavior + +- Provider implementations typically inherit `BaseStorage` and only implement provider-specific operations. +- Public operations return `Result`/`Result<T>` with: + - `IsSuccess` and (when successful) `Value` + - exceptions captured and returned as failed results +- MIME type lookups should flow through `MimeHelper` (do not add ad-hoc MIME detection). 
+ +## Tests + +- `Tests/ManagedCode.Storage.Tests/Core/Crc32HelperTests.cs` +- `Tests/ManagedCode.Storage.Tests/Core/LocalFileTests.cs` +- `Tests/ManagedCode.Storage.Tests/Core/StringStreamTests.cs` +- `Tests/ManagedCode.Storage.Tests/Core/StorageClientChunkTests.cs` (core chunk behaviour used by clients) + +## References + +- `README.md` (package list + quickstart) +- `docs/Testing/strategy.md` (suite structure and rules) + diff --git a/docs/Features/testfakes.md b/docs/Features/testfakes.md new file mode 100644 index 0000000..a971e58 --- /dev/null +++ b/docs/Features/testfakes.md @@ -0,0 +1,37 @@ +# Feature: Test Fakes (`ManagedCode.Storage.TestFakes`) + +## Purpose + +Provide lightweight storage doubles for tests and demos, allowing consumers to replace real provider registrations without provisioning cloud resources. + +These fakes are intended for fast tests where provider-specific behaviour is not the subject under test. + +## Main Flows + +- Register a real provider (for production wiring). +- Replace it with a fake in tests using DI replacement helpers. + +```mermaid +flowchart LR + App[App/Test] --> DI[DI container] + DI --> Real[Real provider registration] + DI --> Fake[Replace*() -> Fake provider] + Fake --> Tests[Fast tests without cloud accounts] +``` + +## Components + +- `ManagedCode.Storage.TestFakes/FakeAzureStorage.cs` +- `ManagedCode.Storage.TestFakes/FakeAzureDataLakeStorage.cs` +- `ManagedCode.Storage.TestFakes/FakeAWSStorage.cs` +- `ManagedCode.Storage.TestFakes/FakeGoogleStorage.cs` +- `ManagedCode.Storage.TestFakes/MockCollectionExtensions.cs` (replacement helpers) + +## Current Behavior + +- Fakes are resolved through `Microsoft.Extensions.DependencyInjection` and implement the same provider interfaces as the real storages. +- Prefer full integration tests (Testcontainers / HTTP fakes) for verifying provider-specific behaviour; use fakes for “consumer wiring” tests. 
+ +## Tests + +- `Tests/ManagedCode.Storage.Tests/ExtensionsTests/ReplaceExtensionsTests.cs` diff --git a/docs/Features/virtual-file-system.md b/docs/Features/virtual-file-system.md new file mode 100644 index 0000000..8605864 --- /dev/null +++ b/docs/Features/virtual-file-system.md @@ -0,0 +1,90 @@ +# Feature: Virtual File System (`ManagedCode.Storage.VirtualFileSystem`) + +## Purpose + +Expose a higher-level “virtual file system” API on top of `IStorage`: + +- file/directory objects (`IVirtualFile`, `IVirtualDirectory`) +- metadata caching and convenience operations (exists, read/write streams, range reads) +- predictable behaviour across providers by routing everything through the same `IStorage` surface + +This is *not* a separate cloud provider. It is an abstraction layer that sits above any configured `IStorage`. + +## Main Flows + +### File operations + +```mermaid +flowchart TD + A[Caller] --> VFS[IVirtualFileSystem] + VFS --> VF[IVirtualFile] + VF --> S[IStorage] + S --> P[Concrete provider] +``` + +### Directory operations + +- Directory listing is implemented by prefix-listing blob metadata via `IStorage.GetBlobMetadataListAsync(...)`. 
+ +## Components + +Key files: + +- `ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFileSystem.cs` — VFS entry point; wraps `IStorage` +- `ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualFile.cs` — file operations (`OpenReadAsync`, `OpenWriteAsync`, metadata) +- `ManagedCode.Storage.VirtualFileSystem/Implementations/VirtualDirectory.cs` — directory operations (list, delete) +- `ManagedCode.Storage.VirtualFileSystem/Core/VfsPath.cs` — path handling +- `ManagedCode.Storage.VirtualFileSystem/Options/VfsOptions.cs` — caching + defaults +- `ManagedCode.Storage.VirtualFileSystem/Streaming/VfsWriteStream.cs` — buffered write stream that uploads on dispose + +## DI Wiring + +```bash +dotnet add package ManagedCode.Storage.VirtualFileSystem +``` + +VFS requires an `IStorage` to be registered first (any provider works). Then you add the overlay: + +```csharp +using ManagedCode.Storage.FileSystem.Extensions; +using ManagedCode.Storage.VirtualFileSystem.Core; +using ManagedCode.Storage.VirtualFileSystem.Extensions; + +builder.Services.AddFileSystemStorageAsDefault(options => +{ + options.BaseFolder = Path.Combine(builder.Environment.ContentRootPath, "storage"); +}); + +builder.Services.AddVirtualFileSystem(options => +{ + options.DefaultContainer = "vfs"; + options.EnableCache = true; +}); + +public sealed class MyService(IVirtualFileSystem vfs) +{ + public async Task DemoAsync(CancellationToken ct) + { + var file = await vfs.GetFileAsync("docs/readme.txt", ct); + await file.WriteAllTextAsync("hello", cancellationToken: ct); + + var text = await file.ReadAllTextAsync(cancellationToken: ct); + } +} +``` + +## Current Behavior + +- Existence checks and metadata can be cached in `IMemoryCache` when enabled via `VfsOptions`. +- `OpenWriteAsync` currently uses a buffered write stream (`VfsWriteStream`) that uploads when the stream is disposed. +- Concurrency checks can be enforced via `WriteOptions.ExpectedETag` where supported by the underlying provider. 
+ +## Tests + +- `Tests/ManagedCode.Storage.Tests/VirtualFileSystem/VirtualFileSystemTests.cs` +- `Tests/ManagedCode.Storage.Tests/VirtualFileSystem/VirtualFileSystemManagerTests.cs` +- `Tests/ManagedCode.Storage.Tests/VirtualFileSystem/*VirtualFileSystemFixture.cs` (cross-provider fixtures) + +## References + +- `docs/Testing/strategy.md` diff --git a/docs/Testing/strategy.md b/docs/Testing/strategy.md new file mode 100644 index 0000000..a1ed72a --- /dev/null +++ b/docs/Testing/strategy.md @@ -0,0 +1,62 @@ +# Testing Strategy + +ManagedCode.Storage uses **xUnit** + **Shouldly** and aims to verify storage behaviour through realistic flows (upload/download/list/delete/metadata) with minimal mocking. + +## Test Project + +- Primary suite: `Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj` + +## Suite Map + +```mermaid +flowchart TD + Tests[ManagedCode.Storage.Tests] --> Core[Core helpers + invariants] + Tests --> Providers[Provider suites] + Tests --> AspNet[ASP.NET controllers + SignalR] + Tests --> Vfs[VFS suites] + + Providers --> Containers[Testcontainers (Azurite/LocalStack/FakeGcsServer/SFTP)] + Providers --> CloudDrive[CloudDrive (Graph/Drive/Dropbox)] + CloudDrive --> HttpFakes[HttpMessageHandler fakes wired into official SDK clients] +``` + +## Structure + +Tests are grouped by “surface” and provider: + +- `Tests/ManagedCode.Storage.Tests/Core/` — `ManagedCode.Storage.Core` behaviour (helpers, options, invariants) +- `Tests/ManagedCode.Storage.Tests/VirtualFileSystem/` — VFS behaviour and fixtures +- `Tests/ManagedCode.Storage.Tests/Storages/*/` — provider suites (Azure/AWS/GCS/FileSystem/Sftp/CloudDrive/CloudKit) +- `Tests/ManagedCode.Storage.Tests/AspNetTests/` — ASP.NET controllers + SignalR flows (end-to-end via in-process test host) +- `Tests/ManagedCode.Storage.Tests/Common/` — shared test utilities, Testcontainers helpers, test app host + +## External Dependencies + +Where possible, tests run without real cloud accounts: + +- 
Azure/AWS/GCS/SFTP suites use **Testcontainers** (Azurite, LocalStack, FakeGcsServer, SFTP container). +- CloudDrive suites (OneDrive/Google Drive/Dropbox) use `HttpMessageHandler`-based fakes wired into the **official SDK clients**, asserting real behaviour over HTTP without hitting the network. + +## Categories + +Some tests are marked as “large file” to validate streaming behaviour: + +- `[Trait("Category", "LargeFile")]` + +Run everything (canonical): + +```bash +dotnet test Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj --configuration Release +``` + +Skip large-file tests when iterating: + +```bash +dotnet test Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj --configuration Release --filter "Category!=LargeFile" +``` + +## Quality Rules + +- Each test must assert concrete, observable behaviour (state/output/errors/side-effects). +- Mocks/fakes are allowed only for **external** systems that cannot reasonably run in tests; the fake must match the official API surface (paths, status codes, payload shapes). +- Do not weaken or delete tests to make them pass; fix the behaviour instead. diff --git a/docs/server-streaming-plan.md b/docs/server-streaming-plan.md index c24dc07..c3a9f9d 100644 --- a/docs/server-streaming-plan.md +++ b/docs/server-streaming-plan.md @@ -5,6 +5,40 @@ - Deliver matching HTTP and SignalR clients that can stream files, resume transfers, and interoperate with the controllers by default. - Maintain a provider-agnostic test suite that validates the contract across file system and all cloud storages. 
+## High-level Architecture + +```mermaid +flowchart LR + subgraph Clients + HttpClient[ManagedCode.Storage.Client (HTTP)] + SigClient[ManagedCode.Storage.Client.SignalR] + Browser[Browser / App] + end + + subgraph Server + Controllers[ASP.NET Controllers] + Hub[SignalR Hub] + Chunk[ChunkUploadService] + end + + subgraph Storage + Abstraction[IStorage] + Provider[Concrete provider (Azure/AWS/GCS/FS/SFTP/etc.)] + end + + Browser --> HttpClient + Browser --> SigClient + HttpClient --> Controllers + SigClient --> Hub + + Controllers --> Chunk + Hub --> Chunk + + Controllers --> Abstraction + Hub --> Abstraction + Abstraction --> Provider +``` + ## HTTP API Surface - `POST /api/storage/upload` — multipart upload for small/medium files; stores directly via `IStorage.UploadAsync`. - `POST /api/storage/upload/stream` — accepts raw stream (`application/octet-stream`) with `X-File-Name`, `X-Content-Type`, optional `X-Directory`; handles large uploads without buffering when possible. @@ -64,4 +98,3 @@ - HLS playlist generation for video streaming. - Server-sent events for progress notifications (bridge from hub to HTTP clients). - gRPC alternative endpoints when HTTP/3 is available. - diff --git a/docs/templates/ADR-Template.md b/docs/templates/ADR-Template.md new file mode 100644 index 0000000..3c0a418 --- /dev/null +++ b/docs/templates/ADR-Template.md @@ -0,0 +1,42 @@ +# ADR {{NNN}}: {{Title}} + +- Status: Proposed | Accepted | Superseded +- Date: {{YYYY-MM-DD}} + +## Context + +What problem are we solving? What constraints matter? + +## Decision + +What did we decide and why? + +## Diagram + +```mermaid +flowchart LR + A[Before] --> B[Decision] + B --> C[After] +``` + +## Options Considered + +### Option A + +- Pros: +- Cons: + +### Option B + +- Pros: +- Cons: + +## Consequences + +What changes as a result (code, tests, docs, operational impact)? 
+ +## Links + +- Related features: +- Related code: +- External references: diff --git a/docs/templates/Feature-Template.md b/docs/templates/Feature-Template.md new file mode 100644 index 0000000..0dd6529 --- /dev/null +++ b/docs/templates/Feature-Template.md @@ -0,0 +1,53 @@ +# Feature: {{Feature Name}} + +## Purpose + +Explain what the feature does and why it exists. + +## Main Flows + +Describe the primary user / system flows. + +```mermaid +flowchart TD + A[Caller] --> B[Component] + B --> C[Dependency] + C --> D[Result] +``` + +## Components + +List the main code components involved (projects, folders, classes, public APIs). + +- Projects: +- Key types: +- Key entry points: + +## Current Behavior + +Describe how it behaves today (happy path, errors, edge cases, constraints). + +## Configuration + +Document configuration and DI wiring (options, required secrets, environment variables). + +## Tests + +List the tests that cover this feature and what they assert. + +- Existing tests: +- Gaps / TODO: + +## Definition of Done + +- Core flow is implemented and documented. +- Automated tests verify real behaviour (not only mocked interactions). +- `dotnet test` passes for the repository test project. +- Docs updated (README + `docs/`). + +## References + +- Related docs: +- Related ADRs: +- External specs / SDK docs: + diff --git a/github-pages/404.html b/github-pages/404.html new file mode 100644 index 0000000..c7a0d8f --- /dev/null +++ b/github-pages/404.html @@ -0,0 +1,16 @@ +--- +layout: default +title: "404" +description: This page doesn't exist. Check the documentation navigation or go back home. +permalink: /404.html +--- + +
+

404

+

This page doesn't exist.

+

Check the navigation links or return to the home page.

+ +
diff --git a/github-pages/_config.yml b/github-pages/_config.yml new file mode 100644 index 0000000..b49d374 --- /dev/null +++ b/github-pages/_config.yml @@ -0,0 +1,27 @@ +title: ManagedCode.Storage +tagline: Cross-provider storage toolkit for .NET +description: ManagedCode.Storage wraps vendor SDKs behind a single IStorage abstraction so uploads, downloads, metadata, streaming, and retention behave the same across providers. +url: "https://managedcode.github.io" +baseurl: "/Storage" + +# SEO & AEO (AI Engine Optimization) +keywords: "ManagedCode.Storage, IStorage, blob storage, Azure Blob Storage, Azure Data Lake, Amazon S3, Google Cloud Storage, OneDrive, Google Drive, Dropbox, CloudKit, SFTP, .NET, ASP.NET, SignalR, streaming uploads, chunked uploads" +author: ManagedCode + +# Structured Data for LLMs +organization: ManagedCode +github_repo: managedcode/Storage +license: MIT +og_image: /assets/images/og-image.png + +markdown: kramdown +kramdown: + auto_ids: true + input: GFM + +plugins: + - jekyll-seo-tag + +exclude: + - Gemfile + - Gemfile.lock diff --git a/github-pages/_layouts/default.html b/github-pages/_layouts/default.html new file mode 100644 index 0000000..585f9c9 --- /dev/null +++ b/github-pages/_layouts/default.html @@ -0,0 +1,222 @@ + + + + + + + + {% if page.is_home %} + {{ site.title }} - {{ site.tagline }} + + {% else %} + {{ page.title }} | {{ site.title }} + + {% endif %} + + + + {% if page.url == '/404.html' %} + + {% else %} + + {% endif %} + + + + + + + + {% if page.is_home %} + + {% else %} + + {% endif %} + + + + + + + + + + {% if page.is_home %} + + {% else %} + + {% endif %} + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+
+ {{ content }} +
+
+ + + +
+ +
+ + + + + + + + + + + + diff --git a/github-pages/adr/index.md b/github-pages/adr/index.md new file mode 100644 index 0000000..d1b3dbe --- /dev/null +++ b/github-pages/adr/index.md @@ -0,0 +1,13 @@ +--- +layout: default +title: ADR +description: Architecture Decision Records (ADR) for ManagedCode.Storage. +nav_order: 6 +--- + +# ADR + +This page is generated from `docs/ADR/` in CI (GitHub Pages workflow). + +Start here: `docs/ADR/index.md`. + diff --git a/github-pages/api/index.md b/github-pages/api/index.md new file mode 100644 index 0000000..5a3a8d4 --- /dev/null +++ b/github-pages/api/index.md @@ -0,0 +1,8 @@ +--- +layout: default +title: API +description: HTTP and SignalR API documentation for ManagedCode.Storage.Server. +nav_order: 6 +--- + + diff --git a/github-pages/assets/css/style.css b/github-pages/assets/css/style.css new file mode 100644 index 0000000..c56735f --- /dev/null +++ b/github-pages/assets/css/style.css @@ -0,0 +1,1049 @@ +/* Table of Contents */ +.toc { + background: #F5F3F7; + border-radius: 12px; + padding: 24px 28px; + margin-bottom: 40px; + border: 1px solid #E8E5EB; +} + +.toc-title { + font-size: 14px; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.5px; + color: #6b6574; + margin-bottom: 16px; +} + +.toc ol { + list-style: none; + padding-left: 0; + margin-bottom: 0; + counter-reset: toc-counter; +} + +.toc > ol > li { + counter-increment: toc-counter; + margin-bottom: 8px; +} + +.toc > ol > li > a::before { + content: counter(toc-counter) ". 
"; + color: #785D8F; + font-weight: 500; +} + +.toc a { + color: #353D4C; + font-size: 15px; + line-height: 1.5; + text-decoration: none; +} + +.toc a:hover { + color: #785D8F; +} + +/* Basic Reset */ +* { + margin: 0; + padding: 0; + box-sizing: border-box; +} + +html, body { + overflow-x: hidden; + width: 100%; +} + +body { + font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', sans-serif; + font-size: 17px; + color: #27232A; + background: #FFFFFF; + line-height: 1.7; + min-height: 100vh; + display: flex; + flex-direction: column; + -webkit-font-smoothing: antialiased; + -moz-osx-font-smoothing: grayscale; + text-rendering: optimizeLegibility; + font-feature-settings: "kern" 1, "liga" 1; +} + +html { + scroll-behavior: smooth; +} + +*:focus { + outline: 2px solid #785D8F; + outline-offset: 2px; +} + +/* Navigation */ +.nav { + display: flex; + flex-direction: row; + justify-content: space-between; + align-items: center; + padding: 20px 60px; + width: 100%; + max-width: 1280px; + margin: 0 auto; + position: sticky; + top: 0; + background-color: rgba(255, 255, 255, 0.95); + backdrop-filter: blur(10px); + z-index: 100; +} + +.nav-logo { + font-weight: 600; + font-size: 22px; + letter-spacing: -0.5px; + background: linear-gradient(to right, #27232A, #785D8F); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; + text-decoration: none; +} + +.nav-links { + display: flex; + flex-direction: row; + gap: 28px; + align-items: center; + min-height: 40px; +} + +.nav-links a { + font-weight: 400; + font-size: 15px; + line-height: 1.5; + color: #27232A; + text-decoration: none; +} + +.nav-links a:hover, +.nav-links a.active { + color: #785D8F; +} + +.nav-links a.nav-brand { + font-weight: 500; + color: #785D8F; + padding: 8px 16px; + border: 1px solid #E8E5EB; + border-radius: 20px; +} + +.nav-links a.nav-brand:hover { + background: #785D8F; + color: white; + border-color: #785D8F; +} + +/* Main content */ +.main { 
+ flex: 1; + width: 100%; + max-width: 800px; + margin: 0 auto; + padding: 48px 40px 80px; +} + +/* Typography */ +h1, h2, h3, h4, h5, h6 { + font-weight: 600; + color: #27232A; + letter-spacing: -0.02em; +} + +h1 { + font-size: 42px; + line-height: 1.15; + background: linear-gradient(to right, #27232A, #785D8F); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; + margin-bottom: 24px; + padding-bottom: 20px; + border-bottom: 1px solid #E8E5EB; +} + +h2 { + font-size: 28px; + line-height: 1.25; + margin-top: 56px; + margin-bottom: 20px; + padding-bottom: 12px; + border-bottom: 1px solid #E8E5EB; +} + +h3 { + font-size: 21px; + line-height: 1.35; + margin-top: 40px; + margin-bottom: 14px; +} + +h4 { + font-size: 18px; + line-height: 1.4; + margin-top: 32px; + margin-bottom: 12px; +} + +p { + font-size: 17px; + line-height: 1.7; + color: #353D4C; + margin-bottom: 20px; + letter-spacing: 0.01em; +} + +strong { + font-weight: 600; + color: #27232A; +} + +a { + color: #785D8F; + text-decoration: none; +} + +a:hover { + color: #4F3959; +} + +/* Lists */ +ul, ol { + margin-bottom: 24px; + padding-left: 24px; +} + +li { + font-size: 17px; + line-height: 1.5; + color: #353D4C; + margin-bottom: 6px; + letter-spacing: 0.01em; +} + +li > ul, li > ol { + margin-top: 6px; + margin-bottom: 0; +} + +/* Nested lists */ +li > ul > li, +li > ol > li { + margin-bottom: 4px; +} + +/* Code */ +code { + font-family: 'SF Mono', 'Fira Code', 'Consolas', monospace; + font-size: 0.88em; + background: #F5F3F7; + padding: 3px 7px; + border-radius: 5px; + color: #785D8F; +} + +pre { + background: #F5F3F7; + padding: 20px 24px; + border-radius: 12px; + overflow-x: auto; + margin: 24px 0; + border: 1px solid #E8E5EB; +} + +pre code { + background: none; + padding: 0; + color: #27232A; + font-size: 14px; + line-height: 1.6; +} + +/* Blockquote */ +blockquote { + border-left: 3px solid #785D8F; + padding: 16px 24px; + margin: 28px 0; + background: 
linear-gradient(135deg, rgba(120, 93, 143, 0.06), rgba(39, 35, 42, 0.02)); + border-radius: 0 12px 12px 0; +} + +blockquote p { + font-size: 17px; + font-style: italic; + color: #4F3959; + margin-bottom: 0; + line-height: 1.6; +} + +/* Tables */ +table { + width: 100%; + border-collapse: collapse; + margin: 28px 0; + border-radius: 12px; + overflow: hidden; + border: 1px solid #E8E5EB; + font-size: 15px; +} + +th, td { + padding: 14px 18px; + text-align: left; + line-height: 1.5; +} + +th { + background: #F5F3F7; + font-weight: 600; + color: #27232A; + font-size: 14px; + text-transform: uppercase; + letter-spacing: 0.5px; +} + +td { + border-top: 1px solid #E8E5EB; + color: #353D4C; +} + +/* Horizontal rule */ +hr { + border: none; + border-top: 1px solid #E8E5EB; + margin: 48px 0; +} + +/* Buttons */ +.btn { + display: inline-flex; + flex-direction: row; + justify-content: center; + align-items: center; + gap: 8px; + padding: 14px 24px; + border-radius: 28px; + font-family: inherit; + font-size: 15px; + font-weight: 500; + line-height: 1.4; + text-decoration: none; + transition: all 0.3s ease; + border: none; + cursor: pointer; + margin-right: 10px; + margin-bottom: 10px; +} + +.btn-primary { + background-color: #27232A; + color: #FFFFFF; +} + +.btn-primary:hover { + transform: translateY(-2px); + background-color: #4F3959; + text-decoration: none; +} + +.btn:not(.btn-primary) { + background-color: #FFFFFF; + color: #27232A; + border: 1px solid #E8E5EB; +} + +.btn:not(.btn-primary):hover { + background-color: #F5F3F7; + border-color: #785D8F; + text-decoration: none; +} + +/* Footer */ +.footer { + border-top: 1px solid #E8E5EB; + margin-top: auto; + background: #F5F3F7; +} + +.footer-container { + max-width: 800px; + margin: 0 auto; + padding: 28px 40px; + text-align: center; + color: #6b6574; + font-size: 12px; + line-height: 1.5; +} + +.footer-container p { + margin-bottom: 6px; +} + +.footer-container a { + color: #5a5565; + text-decoration: underline; + 
text-underline-offset: 2px; +} + +.footer-container a:hover { + color: #785D8F; +} + +.footer-version { + font-size: 11px; + color: #5a5565; + margin-top: 8px; +} + +/* Templates list */ +.templates-list { + display: flex; + flex-direction: column; + gap: 12px; + margin-top: 32px; +} + +.template-item { + display: flex; + justify-content: space-between; + align-items: center; + padding: 16px 20px; + background: #F5F3F7; + border-radius: 8px; + border: 1px solid #E8E5EB; +} + +.template-name { + font-family: 'SF Mono', 'Fira Code', 'Consolas', monospace; + font-size: 15px; + font-weight: 500; + color: #27232A; +} + +.template-links { + display: flex; + gap: 16px; +} + +.template-links a { + font-size: 14px; + color: #785D8F; + text-decoration: none; +} + +.template-links a:hover { + text-decoration: underline; +} + +/* Download button */ +.download-btn { + display: inline-flex; + align-items: center; + gap: 8px; + padding: 12px 24px; + background: #785D8F; + color: white; + border-radius: 8px; + font-weight: 500; + font-size: 15px; + text-decoration: none; + transition: all 0.2s ease; + margin: 16px 0; +} + +.download-btn:hover { + background: #4F3959; + color: white; + transform: translateY(-1px); +} + +body.dark .download-btn { + background: #a88bc4; + color: #1a1a1a; +} + +body.dark .download-btn:hover { + background: #c4a8d8; +} + +/* 404 Error page */ +.error-page { + text-align: center; + padding: 80px 20px; +} + +.error-page h1 { + font-size: 120px; + background: linear-gradient(to right, #27232A, #785D8F); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; + margin-bottom: 0; + border: none; + padding: 0; +} + +.error-subtitle { + font-size: 24px; + color: #27232A; + margin-bottom: 16px; +} + +.error-joke { + font-size: 17px; + color: #6b6574; + margin-bottom: 40px; +} + +.error-actions { + display: flex; + gap: 12px; + justify-content: center; + flex-wrap: wrap; +} + +/* Copy code button */ +.copy-btn { + 
position: absolute; + top: 8px; + right: 8px; + padding: 4px 10px; + font-size: 12px; + background: #27232A; + color: white; + border: none; + border-radius: 4px; + cursor: pointer; + opacity: 0; + transition: opacity 0.2s; +} + +pre:hover .copy-btn { + opacity: 1; +} + +.copy-btn:hover { + background: #785D8F; +} + +/* Dark mode toggle */ +.dark-toggle-label { + display: flex; + align-items: center; + cursor: pointer; +} + +.dark-toggle-label input { + display: none; +} + +.dark-toggle-label svg { + width: 20px; + height: 20px; + color: #27232A; +} + +.dark-toggle-label .icon-moon { + display: none; +} + +.dark-toggle-label:hover svg { + color: #785D8F; +} + +/* Dark mode styles */ +body.dark { + background: #1a1a1a; + color: #e0e0e0; +} + +body.dark .nav { + background-color: rgba(26, 26, 26, 0.95); +} + +body.dark .nav-logo { + background: linear-gradient(to right, #e0e0e0, #a88bc4); + -webkit-background-clip: text; + background-clip: text; +} + +body.dark .nav-links a { + color: #e0e0e0; +} + +body.dark .nav-links a:hover, +body.dark .nav-links a.active { + color: #a88bc4; +} + +body.dark .nav-links a.nav-brand { + color: #a88bc4; + border-color: #444; +} + +body.dark .nav-links a.nav-brand:hover { + background: #a88bc4; + color: #1a1a1a; +} + +body.dark .dark-toggle-label svg { + color: #e0e0e0; +} + +body.dark .dark-toggle-label .icon-sun { + display: none; +} + +body.dark .dark-toggle-label .icon-moon { + display: block; +} + +body.dark h1, body.dark h2, body.dark h3, body.dark h4 { + color: #e0e0e0; +} + +body.dark h1 { + background: linear-gradient(to right, #e0e0e0, #a88bc4); + -webkit-background-clip: text; + background-clip: text; + border-color: #333; +} + +body.dark h2 { + border-color: #333; +} + +body.dark p, body.dark li { + color: #b0b0b0; +} + +body.dark strong { + color: #e0e0e0; +} + +body.dark a { + color: #a88bc4; +} + +body.dark a:hover { + color: #c4a8d8; +} + +body.dark code { + background: #2a2a2a; + color: #a88bc4; +} + +body.dark pre { 
+ background: #2a2a2a; + border-color: #333; +} + +body.dark pre code { + color: #e0e0e0; +} + +body.dark blockquote { + background: linear-gradient(135deg, rgba(168, 139, 196, 0.1), rgba(26, 26, 26, 0.5)); + border-color: #a88bc4; +} + +body.dark blockquote p { + color: #c4a8d8; +} + +body.dark .toc { + background: #2a2a2a; + border-color: #333; +} + +body.dark .toc-title { + color: #888; +} + +body.dark .toc a { + color: #b0b0b0; +} + +body.dark .toc a:hover { + color: #a88bc4; +} + +body.dark table { + border-color: #333; +} + +body.dark th { + background: #2a2a2a; + color: #e0e0e0; +} + +body.dark td { + border-color: #333; + color: #b0b0b0; +} + +body.dark hr { + border-color: #333; +} + +body.dark .footer { + background: #2a2a2a; + border-color: #333; +} + +body.dark .footer-container, +body.dark .footer-container a { + color: #888; +} + +body.dark .footer-container a:hover { + color: #a88bc4; +} + +body.dark .template-item { + background: #2a2a2a; + border-color: #333; +} + +body.dark .template-name { + color: #e0e0e0; +} + +body.dark .template-links a { + color: #a88bc4; +} + +body.dark .back-to-top { + background: #e0e0e0; + color: #1a1a1a; +} + +body.dark .back-to-top:hover { + background: #a88bc4; +} + +body.dark .btn-primary { + background: #e0e0e0; + color: #1a1a1a; +} + +body.dark .btn-primary:hover { + background: #a88bc4; +} + +body.dark .btn:not(.btn-primary) { + background: #2a2a2a; + color: #e0e0e0; + border-color: #444; +} + +body.dark .btn:not(.btn-primary):hover { + background: #333; + border-color: #a88bc4; +} + +/* Reading time */ +.reading-time { + font-size: 14px; + color: #6b6574; + margin-bottom: 24px; +} + +body.dark .reading-time { + color: #888; +} + +/* Details/Summary (collapsible FAQ sections) */ +details.faq-item { + background: #FFFFFF !important; + border: 1px solid #E8E5EB !important; + border-radius: 12px !important; + margin-bottom: 16px !important; + box-shadow: 0 2px 8px rgba(39, 35, 42, 0.06) !important; + overflow: hidden 
!important; +} + +details.faq-item[open] { + border-color: #785D8F !important; + box-shadow: 0 4px 12px rgba(120, 93, 143, 0.15) !important; +} + +details.faq-item summary { + padding: 18px 24px !important; + cursor: pointer !important; + font-weight: 600 !important; + font-size: 16px !important; + color: #27232A !important; + list-style: none !important; + display: block !important; + background: #FAFAFA !important; + margin: 0 !important; +} + +details.faq-item summary::-webkit-details-marker { + display: none !important; +} + +details.faq-item summary::marker { + display: none !important; + content: "" !important; +} + +details.faq-item summary:hover { + background: #F0EDF3 !important; +} + +details.faq-item[open] > summary { + color: #785D8F !important; + background: #F5F3F7 !important; + border-bottom: 1px solid #E8E5EB !important; +} + +details.faq-item .faq-answer { + padding: 20px 24px 20px 28px !important; + margin: 16px 20px 20px 20px !important; + color: #4a4a4a !important; + line-height: 1.8 !important; + background: #FAFAFA !important; + font-size: 15px !important; + border-left: 3px solid #785D8F !important; + border-radius: 0 8px 8px 0 !important; +} + +details.faq-item .faq-answer code { + background: #EDE9F0 !important; + padding: 2px 8px !important; + border-radius: 4px !important; + font-size: 14px !important; + color: #5a4a6a !important; +} + +/* Dark mode FAQ */ +body.dark details.faq-item { + background: #1a1a1a !important; + border-color: #333 !important; + box-shadow: 0 2px 8px rgba(0, 0, 0, 0.3) !important; +} + +body.dark details.faq-item[open] { + border-color: #a88bc4 !important; + box-shadow: 0 4px 12px rgba(168, 139, 196, 0.2) !important; +} + +body.dark details.faq-item summary { + color: #e0e0e0 !important; + background: #222 !important; +} + +body.dark details.faq-item summary:hover { + background: #2a2a2a !important; +} + +body.dark details.faq-item[open] > summary { + color: #a88bc4 !important; + background: #252525 !important; + 
border-bottom-color: #333 !important; +} + +body.dark details.faq-item .faq-answer { + color: #c0c0c0 !important; + background: #222 !important; + border-left-color: #a88bc4 !important; +} + +body.dark details.faq-item .faq-answer code { + background: #333 !important; + color: #c4a8d8 !important; +} + +/* Print styles */ +@media print { + .nav, + .back-to-top, + .dark-toggle-label, + .copy-btn, + .footer-version { + display: none !important; + } + + body { + background: white !important; + color: black !important; + font-size: 12pt; + line-height: 1.5; + } + + .main { + max-width: 100%; + padding: 0; + } + + h1, h2, h3, h4 { + color: black !important; + background: none !important; + -webkit-text-fill-color: black !important; + page-break-after: avoid; + } + + h1 { + font-size: 24pt; + border-bottom: 1px solid #ccc; + } + + h2 { + font-size: 18pt; + border-bottom: 1px solid #eee; + } + + p, li { + color: black !important; + } + + a { + color: black !important; + text-decoration: underline; + } + + a[href^="http"]:after { + content: " (" attr(href) ")"; + font-size: 10pt; + color: #666; + } + + pre, code { + background: #f5f5f5 !important; + border: 1px solid #ddd; + page-break-inside: avoid; + } + + blockquote { + border-left: 2px solid #999; + background: #f9f9f9 !important; + } + + .toc { + background: #f9f9f9 !important; + border: 1px solid #ddd; + page-break-inside: avoid; + } + + .footer { + border-top: 1px solid #ccc; + background: none !important; + } +} + +/* Back to top button */ +.back-to-top { + position: fixed !important; + bottom: 32px !important; + right: 32px !important; + left: auto !important; + width: 44px !important; + max-width: 44px !important; + height: 44px !important; + background: #27232A; + color: white; + border: none; + border-radius: 50%; + cursor: pointer; + display: flex; + align-items: center; + justify-content: center; + opacity: 0; + visibility: hidden; + transition: all 0.3s ease; + box-shadow: 0 2px 8px rgba(39, 35, 42, 0.15); + 
z-index: 99; +} + +.back-to-top.visible { + opacity: 1; + visibility: visible; +} + +.back-to-top:hover { + background: #785D8F; + transform: translateY(-2px); +} + +.back-to-top svg { + width: 18px; + height: 18px; +} + +/* Responsive */ +@media (max-width: 768px) { + .nav { + padding: 16px 24px; + flex-direction: column; + gap: 16px; + } + + .nav-links { + gap: 16px; + flex-wrap: wrap; + justify-content: center; + } + + .main { + padding: 32px 24px 60px; + } + + h1 { + font-size: 32px; + } + + h2 { + font-size: 24px; + margin-top: 40px; + } + + h3 { + font-size: 19px; + } + + p, li { + font-size: 16px; + } + + .footer-container { + padding: 24px; + } +} + +@media (max-width: 480px) { + body { + font-size: 16px; + } + + h1 { + font-size: 28px; + } + + h2 { + font-size: 22px; + } + + .btn:not(.back-to-top) { + width: 100%; + margin-right: 0; + } + + .nav-links a.nav-brand { + order: -1; + width: 100%; + text-align: center; + } +} diff --git a/github-pages/assets/images/favicon.svg b/github-pages/assets/images/favicon.svg new file mode 100644 index 0000000..cea9d9e --- /dev/null +++ b/github-pages/assets/images/favicon.svg @@ -0,0 +1,10 @@ + + + + + + + + + M + diff --git a/github-pages/assets/images/og-image.png b/github-pages/assets/images/og-image.png new file mode 100644 index 0000000000000000000000000000000000000000..8882708e2870fd8e48bc179d6790bda092de4a22 GIT binary patch literal 10830 zcmeHtXH*ki_wNMqP!vHx0RaJ(CSB>$6@h?&AiZ0tp-3mRM0^whAxM=_1p(>3ho(ZL zcR~*mAYcTfw?J-u-*xYoyY83!`9EvTnmJR>-m}j+v-ke(U({m*?MoLpF8}~=38JHE z3;}LUE%C$FKgn=wu${5aPclx!v41Le6EV#< z>1~=kw^kCM$o`-0|7C$3qE^&1wO9-uYusF;pwQ@DX+d%(6>@FU#IbMdB%l9TjymK) zWHzBJ)+_ManO!b>MxjarWO%Iw>7eEz72@m`sIg-Rzx~ri=0qlf#1B$O>Hu~(smT0P zWcDK0ZEAHKr9BKLdnK5u0V)CUN3n}c9glSY&ImQ&!81CC^~FVDakfNM;d|qIQG|ZB zZCM_;0Z=Ja`p|I5iab`ZZ|_v45QiYI@ERI8(E|tZJcvE9izl^zhL4|BHH!YBuH2%* zQ4haABb&T*VLWfRbu`2Z{8mf^cBkO4MxJ@|A5jmpzcvJJR<@hJocgV1V9@pm7-oHK z0GvaW+df=KIeVD!G6TrbrpW|sAnRJ4Pkqbaz=_i^eMc@o$U=-FGnmp)&mg6j#9hmm 
z(F2C1DKBf~jr8&$bb#|DAjg@;ns%{pJBZXWver|K|7$b1C}3~UAq21!NraQ|ykjFSd%+|eaXaX24oR-J5HnbEMlw+pz`@C_nGe^w>D4iFXxOfl z?bqq)>DL$}Z$outXY6J9n&esGc3pHm#?lS4L4+Q+a$=xOPl7|yDVFFI*i25)rNp31 zV+qe^k4_2m&Hh9K@b`qB**@&pOr529TY-7G&9PM50CvB-qW%0{96;{Hr>e#H_7Jj% z9NCbxg@uK<`h7bLG^1NK`*U2>;nMQYZ;?zA_X-ZAs|TS!kR|37gYA@$fZeAWmfZ%O z-L$=b{<4rDkBW^+#}%$04|s`tKW}EYx6%Wa_QTlh3-U4Y^1XTEaoz)U@3C24alD@| zMGt-D_x$p(;>JQu%l3*w9ib*jO)#If;aIVJ|6tb35Fgf`edcOq&**-P8_aB+RGhl5 zee%5+J(}v$MZAW+YmSBeF`3*h#!RxUR*BJtp9L2;P1<;vh1;UAJNnHwf8c4~ z=#L1=>Xmjd~jL z4U@Bt0WECu(}z!5*UJ}TNP)Lpk4y7A$hjGGBo6o%{=0J1jH>$}J0&H!;%Kk$u;gM{>iso)^1`4_tsb zMrZvhz1^kJ7FH5>lw~n2Q+=2ES>dM;7c@xmKPAJLLe4I zIHy-oe;YB8EFrsH_4*svnCg3a`Oc68iKko-WR9iB!Sm((O;5go=0^zp>Quu|xa=;B zPj8d^{O)8+Z&)=zJr%b?%wPI_;wjK`3wc*yIotjr1uj8iMZ&^A>-ByE=?`&TALa+O zrhmB&TC3nnZ}gTcYm}hPjqMMd>iW0ls*gjniT*prDb1y+-p`Zyjhju5@(^O$Lx)Co z7AMBRAM-CkFOW=x!kgY?Ulb~9meBW3 zBPk5RMc+!deYySiilF|p;G~&5Edd)d^R?NV7$`Y(Lc=gyHFLSbdtl+cYj zXiurZp%?;h!GwVK5OGc#=N~L#b@ec3gy?b^qdD=oBF_Trs z&F9mn=ApAm{|;w)lGbmP4p#JoNf-DhAlmpHSrF&eP>!5E!8FX$Xo_h{s)}d#evF;7 zcA##|IlfK4dp2GX-oVi6C)6v*t$V{a|sxo`d9&S*}u3p^gh|eWGuf&w^zNGvK7b<@Ley zKC6vhix!-CGV~32bJ}4aoUavXR#-IX?FK=o;B_ABB_6?YO<@Vnp=^a$F@XW^YLmbY z$Q3S~PzOCLY16(hsLgIj=ck%IwLbUyT+0@f+sdQ{)GtMsxn}Q>nqIw`rQ;=wUN779C}L<4OykP+4U$ltgt-IcK+CYghNTR!Fsk_+^D!cYCo>YFEDwe8p@3WdX-COsBvx_L)E@jc?@p#)`lg*T{Uud6p!a7OK;qvS7&nf zp-gwJwvA14uf*zfNx0cqS3?X6Xu4-v%)x}@iOs}{e-SJ7B^C3IGf*&fJAeDE_3j7MZ^5Q%@`+gc{%y{da`s_;N3p?)acmBU&tbN ztuw+lXZwO>1c)*+g1wxLSI$NL~J0^XS9PMCFjXBgIGD=Zc&1 z`H}5e2OoJ5JTA>G1Obel48vn12SQ1$WB!Vx?;*K3{l&l|ob99fz8=%A=r@iYh)kD+ z?oqpU7g;eG-@Bt%&Ta^FQ(g zjDr&w-cc-&{Hmsa^xH){|JCljH@wbeIN3twM5 z`2g%`ZmNna`<+y$onDhuSv;A&S!YqNsOoDq`%M7k&@MB-P?PZ7UXJ1MOhOD-xNB||bMlI#bG|RDNWiDr zV0}cslS^T(GogDRT^>2gHu0$c(HrZ#6-KwnQgwU_hV&ikrUq>DP!WyY1QWqI!Y5Uf^C(%j|Stj;U zwnD*-N<9iT^Zd2#Kb7y`i+SUcusH?KUIBdW*lv2UOs9v{A9Pv$c@65$5h_2bDc`b+ zlh3fW4>D$!m@U>m=rWCqIsEiF25!l<7%&r;Tl^j-@r~j&or@k1v|U<%Gqm$i1QUQc 
zwIj5uON`c(4fQU(1J?$5z(eZ~n=QsEQMQ&!{~xGZaW<~E+AqphU(}OTq~&8w3ptgt zmUlE;YHuNVR8Ev9CTLv3e&il=B_=$_78TiBvN~A#*zmvGHE~Yh$mP0<+4PV2!l*u! z3PlD~+`54b9H4HkZXqYiSl zv%&Iau*u6cWmA{cE|ZEDuV7B6tA=TU_ddD$UBR(0Nmpb2ukiyaPpayE)l6>;loFHW zcSjflv`TCuXNTDb-utM^&C$M#un)q7Pu00Ut20cx+p$7X245^EQ5zvmE{Q-$K>!Dzrz#~bZI7)h!~8i4&7F?})RL=r@${vJS}Ow=h3= z5j$(;)OZDg0_WU9WtE{mm9=`FIR^t0fBLcv{+)yfPk)4$_Xx!CK9Az0N7Ta{|#JHu@In9kO# zwLClHwd8#ZZW#`|H+Pq~M&jgQs5gRLrRN1wg&_|vN=;>ZX z^Kze059@ZVMj735YoGp50hC&!8pJ){9*)u^xtG_twiLJ9yHMtYGs+Fd)zk)^T5qb5 z62F!kHDqpjKV8VK-pHY-z6+wXQx^+0YSDGvvL;Xe&Ep=bac>--4ECaau}{0Dv@n(B z+D?Kkic2q5GLNvRKDuttWpB)yb4@EP-v<`-vqb-ZBon2Z$~v#{11rOFf^J#=yb2q+ z*iZrv&7)~z_(RDfSJalfvV?$vghEM4rMGF$S%u=ou2)wq8@V#xC91LJKVVWSqp!T8 zv7Xqds#YLg#9bkg8EO11;X>lqQOX}UQcP8C%tUx7p8&h`ULrg%D4NR*x(?%6-tZst zxaOB2@>*%k=CvV|rDOMG^knF*6`f17)rft*qiqa-e(mV|sk5VamZr zwEKg6+jr?BM&*W=E1VOoOqveJM18@yBw;~PWoWe~ zi~~97cS?Xm2Py~6zMt*~Js~^!ekd5l@?hF{?8^%YdhLa&)4voCx%cz;voGBRDZE28 zTEK=omG{;4ABPy#WW0omV9&imJ?}drA=`p79=)A+>@f2|^L2?Ii?5jR*_?=YShz+J zyJHh>pypn)Xe=jwjZHa+lvl-@Yc7B1-};&snQJ%aE_{CWXlGc1L~0c`c~CmZA8XAO z{SI#5w1O`Bd}e??FDqwr>;1%=&vR{=zoRB%fQNB#aJ7imt==rG(>*=SbK`D?+^V2V zCpsuJ5KA%l%K`*mW^xq7!F;!e(*z!WhoYbb{4rT>lW#@mzQ zk8uAkJ&kT`YN)^asT%WQF4=1Hcy*i-pe-FUdTd@Zq9_^MW}LS+v%Gv0x2Sh$>C$5s zIV(H{CeO~6zo`~~umNiP(;W{5NLz`C3pRh~+-gjmiSuOH`OhALLUk&FSJQ`UB?6tj zngy(D4%#M$J^_6PeS+SE zrDF@0eMw)p*QS&R|i3a15H?!LQ$a5*FB`j)tz4w3&XL$*KP2Bw_9t$`F26s-JzxKvrd z7=XQpTOmO1K77i(55`Qv8fo&eDu6zg+&f@Zdk*42*$Dy6Bwy2(o;S4OY?#S2YAKmk zec*iO6i4tRn{tu+*>8Jz5F~-M=!Imeb5~^^vP1)kJ!ZZ5xi6N?YhXYD6=@30RRGvm zT)Ue6ro65H?`Ja2g+yM&rx~It087AG`t2ea84JwWVwm!j+TCwObv>hQW27zHA+kA)LkJTQR8w<4y$S`TcDt~*cJ#vk`>P_6w|e2 z=XVOv0uUPfz;2DpK6%6)4jk#XUj&J>G3EA=#XEU{bIS@=L|12L4>Vtt8gNiQ2)MTB zb5%WR#Kq4qAmAWB3jl&VFP8vpFY&Aj3fw?;p0? 
zH!1M}d-HbYRQ*01WVoSgaLQ<=%L;qlpWXNlIOz~R3#LPK&YXf5(Keh4ZQRNHR1{S7 zZK*kZhcQ2uBE7}*M0=1OuR{vp;1HY!ybyq>mtPJlFrYxFcG@ zTdWA>wstpRzDHN+sT_2I)xhy8C0C_5&uSZk>=DPjS;|dWfCE5dbm8cvrw0O&{0pGV zfI0w%o1h=zko8?tOwu?bfKcUlL=_(b%G%(4S>?(jp#u#2nvWsGj8A;7Xq@Lh4^JI^ zDzYZN)*Ofn3*6aTAM2q&xBS6!cJ^XyUdv=2oLbZtIpF0>Ddd*nU)PPglpOG@JC+L% z#g^+E7#e!XkX?N?F++nwYD;L|$V705=%r^Yi^iVwWC^Op=xMj^`eo?QXT5dS=zHWd<%ZF>i$sg1_JFRO-2AM>(r#c{~BFY4Be@gdhEkw4A9-m-piF zEXjR=mPF#c2Jo|%{Mxws>DRu*$+GNGwZm$^hv%US`W^WUZghM~dh3R?q7f@(-SyU^ z9sggIjf(Iq_Hm=DRLi3>YDf95s13%lSE zFl0bMW8uUY?vGFUa<87bg2{7B83h6QHyy%j%64ZWlrebW_CS?;LJk)xo+m?vm%VK` zxofF)T!Zral|SJo6}pBn?&91;|zwqG7uM|gU z!x=QO02IB(XEnXanDN?g*WE;s@36$jrsxf}UQ?vgjtMo_P^d_lI)ZIjEd1Q5Q7Ej~ z;r;;rbK$ifP~8>%6k&7sh_D@$aPMXWnO(`{M!PU=qB8$WA&-u`M}?mJ))*CcSzHe! z{RU{!lqL5ADk_T%ar|MrREhb?|O2a$wkw$tEXsenn!#N>z;hTe^qJqMa2ng-)M+z+OX0heAE#bkzC*6Mm)#FnVZpqocd+JoVOo%qQuDPG0K)!6t8Ge72 z7@*d+FhKY~G0sj?7ZFyXSaF)FP@yLvZ=0OT*mia>n=OFwr7@@FQ_^4G?qyX|O8pdl<)7%5}D4AbgyGTGk*XNVXtBRtpsj0`U(e|QYuisn84gvd~r_mv5LWv(BaC$ zTck+VJx?k}Z^I5$i}(UkJ>0~4wjIV>u05<2eOT~oYZYsmK3BRuQ!IOxg=vfvVls(l zsu_>4`6Lzm+=+gQ>oqsk4?U3i^_r=l9R40T{hewN%Nu>Qn@g*ikry{9=wKYVvbF_n z*{uM%xps6eKp0GVpp^#X# zX(fJL5FxV&c9_uYu-ym7>(6(mjs#nZ^Za2ZW6j)UQx>R47T2c@^^fU>=Dz^CkPeAxmio!+i%$DKSLL3bG@7wDS9 zZC@LpN-mx5EH{BBZu$7IZh3E9)-HVQs9LCeT(hdESOw`=yd)3eh&7{W#MY}ZJg%W1 zDIHKx6LE^MZw=lbFh47mLueV-3^oQ->3HtAMWXBw2A&N5cGAbbX4^?sPXUKUUy7e= z&u8qZ+kWDB2zN6AFX5s$N`sZdPWEQa&$8%pwhHgVZd;~UB_*coP_>l@_1gKRVnWP) z4HTJ+*ONiPF7f_Av#?d~7z%$&-F*HJRJIjJ|0ra{>SCo*n6IPjR1_+HR&yh+KG<4L zfa+vBS=2nbK0Qb64MVEmNxkK((~gtfUV39CM#}G5Aj4uF8sI+5f$h4UX$hI^ zNLbgC0(uS^6=2?oTR5-f(9o4hGse%@H#9G^sZ634YdxexDqb}gk!4I4kAv>Y7=*W(o|5Q*Z7hENq zJ%(qvdOY?S_DVri z%08DXlOL=g3|+qk5|~~FS^HrbCiP3(I(yALWmVgwZmF>Ha%^<7d`Nd46F2!^NMEbQ$NmwIrJ4Iw^yy zDy}F;`$HSa6L!7?M$K2>Er6{H-XBSvLbbhQCE@OBVr5|Wz2rSJN$YLe>${-gdC4zDMHQ8@_0FlNnMwH3 zJL@RMsWHW9COl4`3jJVpxs|yo@Tu{3Qbo&xahpHU zOto-{n4FoJd0oii4?l2y^bxA$LW(!ZJl*{cmWD!jXtVKu1kOO;&k?Lj(=tEPA}DC) 
zpjj*SzpN^HQ!>nNsO2r;iL~`|T1p8DQ_3ucVXkeT)?^t5&%3T~H=VY>cuTq$rO!B- zu1kedx124JBJTVq&+Q-&Wrmh3M95rQ#8z-TKag2tP|48bG96#zt=S zA1d)CogLyD%t(b!3tBc~=$rWcJ(BW{Vp%Kei*uXLDc=eH)$7N?>^*%>4W@FOC%RO_ z8pqvi@?$OfeRY#h_h`7ahWq=roS2o*{U{mOakar|%T}9Gl)6-0M*{XMuIToWubqdN zMoQm57-DZ%ctO!1KM!VmE^jZC=2VPokc72|4~Ski*!g{qwF~i})a11?hSIlZ3W!Et1N;4u%{s*c9KW>(w%|xVw7{!SrDz1O6#%V$nKR`QUo~{I?k6m} z1uAwF(!FmIB~8Qoab6wX`cM*DM|E>m#4ov(OD{2K*>a2bVf-K~8+ipS!6k)13?Kdm z@j1lr`XvE_cW86Ip)LhXUa0lx5nrZTf+CTGMb1~Jug5!E*KWa%_zy)3d3qkgZ(5!z zfY6kpw72&z1t5e_p*FW?Q?G{%;E5v_<6B-TC4!eFc3cDkgQxAwxJkSFx`5;j&=j>k z%j1^24tazBP!KzWBTGEoYo=;$XX`2Z@tHn9R2>ch4m3Br=O;Kf^=ZlZ8Q^7HO9?+8 zc|OR{>#%5rm*AJciXy?7Nv~pbfCC-R{O+UEeR10UxrES?vMBH4L)GBDwG+mKblPKd zaJ_`8eF-s)s~4D|h?ed{T!VDXr?9?4|NG~l-YTJQS+GtG_Y`KwyZ7nMT&0TDezg?%~T2HM_oE_xHO9v-E5aO9ex7-RaQu0&gD+@?I?>xWEFlojt>RvIc5B33{{%Lvy z-Z_M^X78b?7D8kp2EKMRM<*VBF={Tm=%vFU)K{6Zce@Hv|E?{k+J`GnukJ^+hsgVz zl!HGa^rj!BRjKUl28GtTc^%R5V97x~cMU^?%`q`Kdtwf&KGg(x3U%+XZm=tI`Hay= zac%`q8rp%`Qi|%^_0$3#s-x7yPxOHd3Zzn_rPdMu5lr`g@oxS%IY*!6|B!QDzJlIv z&H4{>y*~7X; z!R?>Z<$O4#u@GZxcwz~20H!DfEMFhnzpSSRe4wDtyY#}POaKtlqp z`*almD)k~BzjBUJ*X1o3aY4yxQUM(j^!Kms=)U?dh!D%61oQn6LZg+3ezM4KN&yZ2 gXZ!!RfK4x%cJ7#dnbfbB{kIp$Lj%q7`wkKR3lalf@&Et; literal 0 HcmV?d00001 diff --git a/github-pages/credentials.md b/github-pages/credentials.md new file mode 100644 index 0000000..69ab56a --- /dev/null +++ b/github-pages/credentials.md @@ -0,0 +1,9 @@ +--- +layout: default +title: Credentials +description: How to obtain credentials for OneDrive, Google Drive, Dropbox, and CloudKit. +nav_order: 3 +--- + + + diff --git a/github-pages/features/index.md b/github-pages/features/index.md new file mode 100644 index 0000000..e15a9f5 --- /dev/null +++ b/github-pages/features/index.md @@ -0,0 +1,8 @@ +--- +layout: default +title: Features +description: Documentation for major modules and providers in ManagedCode.Storage. 
+nav_order: 5 +--- + + diff --git a/github-pages/index.md b/github-pages/index.md new file mode 100644 index 0000000..42b0888 --- /dev/null +++ b/github-pages/index.md @@ -0,0 +1,10 @@ +--- +layout: default +title: Home +description: ManagedCode.Storage documentation: cross-provider storage toolkit for .NET and ASP.NET streaming scenarios. +keywords: ManagedCode.Storage, IStorage, blob storage, .NET, ASP.NET, SignalR, Azure, AWS, GCP, OneDrive, Google Drive, Dropbox, CloudKit +is_home: true +nav_order: 1 +--- + + diff --git a/github-pages/robots.txt b/github-pages/robots.txt new file mode 100644 index 0000000..8e2eb8b --- /dev/null +++ b/github-pages/robots.txt @@ -0,0 +1,7 @@ +--- +layout: null +--- +User-agent: * +Allow: / + +Sitemap: {{ site.url }}{{ site.baseurl }}/sitemap.xml diff --git a/github-pages/setup.md b/github-pages/setup.md new file mode 100644 index 0000000..2590bd4 --- /dev/null +++ b/github-pages/setup.md @@ -0,0 +1,9 @@ +--- +layout: default +title: Setup +description: How to clone, build, and run tests for ManagedCode.Storage. +nav_order: 2 +--- + + + diff --git a/github-pages/sitemap.xml b/github-pages/sitemap.xml new file mode 100644 index 0000000..9c3eea8 --- /dev/null +++ b/github-pages/sitemap.xml @@ -0,0 +1,15 @@ +--- +layout: null +--- + + + {% for page in site.pages %} + {% if (page.url contains '.html' or page.url == '/') and page.url != '/404.html' %} + + {{ site.url }}{{ site.baseurl }}{{ page.url | remove: 'index.html' }} + {{ site.time | date: '%Y-%m-%d' }} + {% if page.url == '/' %}1.0{% else %}0.8{% endif %} + + {% endif %} + {% endfor %} + diff --git a/github-pages/templates.md b/github-pages/templates.md new file mode 100644 index 0000000..327310e --- /dev/null +++ b/github-pages/templates.md @@ -0,0 +1,8 @@ +--- +layout: default +title: Templates +description: Documentation templates used in this repository (Feature and ADR templates). 
+nav_order: 7 +--- + + diff --git a/github-pages/testing.md b/github-pages/testing.md new file mode 100644 index 0000000..9a7caaf --- /dev/null +++ b/github-pages/testing.md @@ -0,0 +1,8 @@ +--- +layout: default +title: Testing +description: Test strategy and how to run the ManagedCode.Storage test suite. +nav_order: 4 +--- + + From 37fd26b2d7de086f827d75b7e58b07a72cf03bf3 Mon Sep 17 00:00:00 2001 From: ksemenenko Date: Mon, 15 Dec 2025 11:38:11 +0100 Subject: [PATCH 6/6] fixes --- .github/workflows/ci.yml | 4 +- AGENTS.md | 2 + .../StorageClient.cs | 143 ++++++++++++++---- ManagedCode.Storage.Core/BaseStorage.cs | 17 ++- .../ManagedCode.Storage.Azure/AzureStorage.cs | 3 +- .../CrossProvider/CrossProviderSyncTests.cs | 7 +- .../Common/FileHelper.cs | 62 ++++++-- .../Core/StorageClientChunkTests.cs | 17 ++- .../Server/ChunkUploadServiceTests.cs | 127 ++++++++++------ .../Storages/AWS/AwsContainerFactory.cs | 4 +- .../Storages/Abstracts/StorageClientTests.cs | 14 +- .../Storages/Abstracts/UploadTests.cs | 65 ++++---- .../DropboxClientWrapperHttpTests.cs | 49 +++--- .../CloudDrive/GoogleDriveClientHttpTests.cs | 87 ++++++----- .../CloudDrive/GraphOneDriveClientTests.cs | 18 +-- .../CloudKit/FakeCloudKitHttpHandler.cs | 7 +- .../FileSystem/FileSystemSecurityTests.cs | 86 ++++++----- .../FileSystem/FileSystemUploadTests.cs | 24 +-- 18 files changed, 449 insertions(+), 287 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 3a3c230..6c8107c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -10,8 +10,8 @@ env: DOTNET_VERSION: '10.0.x' jobs: - build: - name: Build and Test + build-and-test: + name: build-and-test runs-on: ubuntu-latest steps: diff --git a/AGENTS.md b/AGENTS.md index fd8304a..56b30de 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -142,6 +142,7 @@ If no new rule is detected → do not update the file. 
- Nullability is enabled: annotate optional members; avoid `!` unless justified - Suffix async APIs with `Async`; keep test names aligned with existing patterns (e.g., `DownloadFile_WhenFileExists_ReturnsSuccess`) - Remove unused usings and let analyzers guide layout +- When a `foreach` loop’s first step is just transforming the iteration variable (e.g., `var y = Map(x)`), prefer mapping the sequence explicitly with `.Select(...)` so intent is clearer and analyzers stay quiet - No magic literals — extract to constants, enums, or config when it improves clarity ### Git & PRs @@ -150,6 +151,7 @@ If no new rule is detected → do not update the file. - Group related edits in one commit and avoid WIP spam - PRs should summarize impact, list touched projects, reference issues, and note new configuration or secrets - Include the `dotnet` commands you ran and add logs when CI needs context +- Keep a required CI check named `build-and-test` running on every PR and push to `main` so branch protection always receives a status (it’s worse for merges if the check is missing/never reported than if it runs and fails) ### Critical (NEVER violate) diff --git a/Integraions/ManagedCode.Storage.Client/StorageClient.cs b/Integraions/ManagedCode.Storage.Client/StorageClient.cs index caf35f2..c1a5380 100644 --- a/Integraions/ManagedCode.Storage.Client/StorageClient.cs +++ b/Integraions/ManagedCode.Storage.Client/StorageClient.cs @@ -45,7 +45,7 @@ public async Task> UploadFile(Stream stream, string apiUrl, using var formData = new MultipartFormDataContent(); formData.Add(streamContent, contentName, contentName); - var response = await httpClient.PostAsync(apiUrl, formData, cancellationToken); + using var response = await httpClient.PostAsync(apiUrl, formData, cancellationToken); if (response.IsSuccessStatusCode) return await response.Content.ReadFromJsonAsync>(cancellationToken: cancellationToken); @@ -63,7 +63,7 @@ public async Task> UploadFile(FileInfo fileInfo, string api { 
formData.Add(streamContent, contentName, contentName); - var response = await httpClient.PostAsync(apiUrl, formData, cancellationToken); + using var response = await httpClient.PostAsync(apiUrl, formData, cancellationToken); if (response.IsSuccessStatusCode) { @@ -77,27 +77,21 @@ public async Task> UploadFile(FileInfo fileInfo, string api public async Task> UploadFile(byte[] bytes, string apiUrl, string contentName, CancellationToken cancellationToken = default) { - using (var stream = new MemoryStream()) - { - stream.Write(bytes, 0, bytes.Length); - - using var streamContent = new StreamContent(stream); - - using (var formData = new MultipartFormDataContent()) - { - formData.Add(streamContent, contentName, contentName); + using var stream = new MemoryStream(bytes, writable: false); + using var streamContent = new StreamContent(stream); + using var formData = new MultipartFormDataContent(); - var response = await httpClient.PostAsync(apiUrl, formData, cancellationToken); + formData.Add(streamContent, contentName, contentName); - if (response.IsSuccessStatusCode) - { - var result = await response.Content.ReadFromJsonAsync>(cancellationToken: cancellationToken); - return result; - } + using var response = await httpClient.PostAsync(apiUrl, formData, cancellationToken); - return Result.Fail(response.StatusCode); - } + if (response.IsSuccessStatusCode) + { + var result = await response.Content.ReadFromJsonAsync>(cancellationToken: cancellationToken); + return result; } + + return Result.Fail(response.StatusCode); } public async Task> UploadFile(string base64, string apiUrl, string contentName, @@ -110,7 +104,7 @@ public async Task> UploadFile(string base64, string apiUrl, formData.Add(fileContent, contentName, contentName); - var response = await httpClient.PostAsync(apiUrl, formData, cancellationToken); + using var response = await httpClient.PostAsync(apiUrl, formData, cancellationToken); if (response.IsSuccessStatusCode) return await 
response.Content.ReadFromJsonAsync>(cancellationToken: cancellationToken); @@ -188,11 +182,13 @@ public async Task> UploadLargeFile(Stream file, string uploadApiUrl formData.Add(new StringContent(bytesRead.ToString()), "Payload.ChunkSize"); formData.Add(new StringContent(totalChunks.ToString()), "Payload.TotalChunks"); - var response = await httpClient.PostAsync(uploadApiUrl, formData, cancellationToken); - if (!response.IsSuccessStatusCode) + using (var response = await httpClient.PostAsync(uploadApiUrl, formData, cancellationToken)) { - var message = await response.Content.ReadAsStringAsync(cancellationToken); - return Result.Fail(response.StatusCode, message); + if (!response.IsSuccessStatusCode) + { + var message = await response.Content.ReadAsStringAsync(cancellationToken); + return Result.Fail(response.StatusCode, message); + } } transmitted += bytesRead; @@ -230,7 +226,7 @@ public async Task> UploadLargeFile(Stream file, string uploadApiUrl KeepMergedFile = false }; - var mergeResult = await httpClient.PostAsJsonAsync(completeApiUrl, completePayload, cancellationToken); + using var mergeResult = await httpClient.PostAsJsonAsync(completeApiUrl, completePayload, cancellationToken); if (!mergeResult.IsSuccessStatusCode) { var message = await mergeResult.Content.ReadAsStringAsync(cancellationToken); @@ -308,14 +304,25 @@ public async Task> GetFileStream(string fileName, string apiUrl, { try { - var response = await httpClient.GetAsync($"{apiUrl}/{fileName}"); - if (response.IsSuccessStatusCode) + var response = await httpClient.GetAsync($"{apiUrl}/{fileName}", HttpCompletionOption.ResponseHeadersRead, cancellationToken); + if (!response.IsSuccessStatusCode) + { + response.Dispose(); + return Result.Fail(response.StatusCode); + } + + Stream contentStream; + try + { + contentStream = await response.Content.ReadAsStreamAsync(cancellationToken); + } + catch { - var stream = await response.Content.ReadAsStreamAsync(); - return Result.Succeed(stream); + 
response.Dispose(); + throw; } - return Result.Fail(response.StatusCode); + return Result.Succeed(new HttpResponseMessageStream(contentStream, response)); } catch (HttpRequestException e) when (e.StatusCode != null) { @@ -328,6 +335,82 @@ public async Task> GetFileStream(string fileName, string apiUrl, } } +file sealed class HttpResponseMessageStream(Stream innerStream, HttpResponseMessage response) : Stream +{ + private readonly Stream _innerStream = innerStream ?? throw new ArgumentNullException(nameof(innerStream)); + private readonly HttpResponseMessage _response = response ?? throw new ArgumentNullException(nameof(response)); + + public override bool CanRead => _innerStream.CanRead; + public override bool CanSeek => _innerStream.CanSeek; + public override bool CanWrite => _innerStream.CanWrite; + public override long Length => _innerStream.Length; + + public override long Position + { + get => _innerStream.Position; + set => _innerStream.Position = value; + } + + public override void Flush() => _innerStream.Flush(); + + public override Task FlushAsync(CancellationToken cancellationToken) => _innerStream.FlushAsync(cancellationToken); + + public override int Read(byte[] buffer, int offset, int count) => _innerStream.Read(buffer, offset, count); + + public override int Read(Span buffer) => _innerStream.Read(buffer); + + public override Task ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => + _innerStream.ReadAsync(buffer, offset, count, cancellationToken); + + public override ValueTask ReadAsync(Memory buffer, CancellationToken cancellationToken = default) => + _innerStream.ReadAsync(buffer, cancellationToken); + + public override long Seek(long offset, SeekOrigin origin) => _innerStream.Seek(offset, origin); + + public override void SetLength(long value) => _innerStream.SetLength(value); + + public override void Write(byte[] buffer, int offset, int count) => _innerStream.Write(buffer, offset, count); + + public override 
void Write(ReadOnlySpan buffer) => _innerStream.Write(buffer); + + public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken) => + _innerStream.WriteAsync(buffer, offset, count, cancellationToken); + + public override ValueTask WriteAsync(ReadOnlyMemory buffer, CancellationToken cancellationToken = default) => + _innerStream.WriteAsync(buffer, cancellationToken); + + protected override void Dispose(bool disposing) + { + if (disposing) + { + try + { + _innerStream.Dispose(); + } + finally + { + _response.Dispose(); + } + } + + base.Dispose(disposing); + } + + public override async ValueTask DisposeAsync() + { + try + { + await _innerStream.DisposeAsync(); + } + finally + { + _response.Dispose(); + } + + await base.DisposeAsync(); + } +} + file class ChunkUploadCompleteRequestDto { public string UploadId { get; set; } = string.Empty; diff --git a/ManagedCode.Storage.Core/BaseStorage.cs b/ManagedCode.Storage.Core/BaseStorage.cs index 28be9f1..eeb3508 100644 --- a/ManagedCode.Storage.Core/BaseStorage.cs +++ b/ManagedCode.Storage.Core/BaseStorage.cs @@ -110,23 +110,25 @@ public Task> UploadAsync(Stream stream, UploadOptions optio return UploadInternalAsync(stream, SetUploadOptions(options), cancellationToken); } - public Task> UploadAsync(byte[] data, UploadOptions options, CancellationToken cancellationToken = default) + public async Task> UploadAsync(byte[] data, UploadOptions options, CancellationToken cancellationToken = default) { if (string.IsNullOrWhiteSpace(options.MimeType)) options.MimeType = MimeHelper.GetMimeType(options.FileName); - return UploadInternalAsync(new MemoryStream(data), SetUploadOptions(options), cancellationToken); + using var stream = new MemoryStream(data, writable: false); + return await UploadInternalAsync(stream, SetUploadOptions(options), cancellationToken); } - public Task> UploadAsync(string content, UploadOptions options, CancellationToken cancellationToken = default) + public async 
Task> UploadAsync(string content, UploadOptions options, CancellationToken cancellationToken = default) { if (string.IsNullOrWhiteSpace(options.MimeType)) options.MimeType = MimeHelper.TEXT; - return UploadInternalAsync(new Utf8StringStream(content), SetUploadOptions(options), cancellationToken); + using var stream = new Utf8StringStream(content); + return await UploadInternalAsync(stream, SetUploadOptions(options), cancellationToken); } - public Task> UploadAsync(FileInfo fileInfo, UploadOptions options, CancellationToken cancellationToken = default) + public async Task> UploadAsync(FileInfo fileInfo, UploadOptions options, CancellationToken cancellationToken = default) { if (string.IsNullOrWhiteSpace(options.MimeType)) options.MimeType = MimeHelper.GetMimeType(fileInfo.Extension); @@ -136,7 +138,8 @@ public Task> UploadAsync(FileInfo fileInfo, UploadOptions o options.FileName = fileInfo.Name; } - return UploadInternalAsync(fileInfo.OpenRead(), SetUploadOptions(options), cancellationToken); + using var stream = fileInfo.OpenRead(); + return await UploadInternalAsync(stream, SetUploadOptions(options), cancellationToken); } public Task> DownloadAsync(string fileName, CancellationToken cancellationToken = default) @@ -336,4 +339,4 @@ public void Dispose() if (StorageClient is IDisposable disposable) disposable.Dispose(); } -} \ No newline at end of file +} diff --git a/Storages/ManagedCode.Storage.Azure/AzureStorage.cs b/Storages/ManagedCode.Storage.Azure/AzureStorage.cs index 575c21e..2c37777 100644 --- a/Storages/ManagedCode.Storage.Azure/AzureStorage.cs +++ b/Storages/ManagedCode.Storage.Azure/AzureStorage.cs @@ -204,9 +204,8 @@ protected override async Task DeleteDirectoryInternalAsync(string direct { var blobs = StorageClient.GetBlobs(prefix: directory, cancellationToken: cancellationToken); - foreach (var blob in blobs) + foreach (var blobClient in blobs.Select(blob => StorageClient.GetBlobClient(blob.Name))) { - var blobClient = 
StorageClient.GetBlobClient(blob.Name); await blobClient.DeleteIfExistsAsync(DeleteSnapshotsOption.None, null, cancellationToken); } diff --git a/Tests/ManagedCode.Storage.Tests/AspNetTests/CrossProvider/CrossProviderSyncTests.cs b/Tests/ManagedCode.Storage.Tests/AspNetTests/CrossProvider/CrossProviderSyncTests.cs index 3025b8c..901a972 100644 --- a/Tests/ManagedCode.Storage.Tests/AspNetTests/CrossProvider/CrossProviderSyncTests.cs +++ b/Tests/ManagedCode.Storage.Tests/AspNetTests/CrossProvider/CrossProviderSyncTests.cs @@ -42,10 +42,11 @@ public async Task SyncBlobAcrossProviders_PreservesPayloadAndMetadata(string sou await EnsureContainerAsync(sourceStorage); await EnsureContainerAsync(targetStorage); - var payload = new byte[256 * 1024]; - RandomNumberGenerator.Fill(payload); + var payload = new byte[256 * 1024]; + RandomNumberGenerator.Fill(payload); - var expectedCrc = Crc32Helper.CalculateStreamCrc(new MemoryStream(payload, writable: false)); + using var crcStream = new MemoryStream(payload, writable: false); + var expectedCrc = Crc32Helper.CalculateStreamCrc(crcStream); var directory = $"sync-tests/{Guid.NewGuid():N}"; var fileName = $"payload-{Guid.NewGuid():N}.bin"; diff --git a/Tests/ManagedCode.Storage.Tests/Common/FileHelper.cs b/Tests/ManagedCode.Storage.Tests/Common/FileHelper.cs index 6929b3d..a2216ba 100644 --- a/Tests/ManagedCode.Storage.Tests/Common/FileHelper.cs +++ b/Tests/ManagedCode.Storage.Tests/Common/FileHelper.cs @@ -1,6 +1,8 @@ using System; using System.IO; using System.Linq; +using System.Threading; +using System.Threading.Tasks; using ManagedCode.MimeTypes; using ManagedCode.Storage.Core.Models; using Microsoft.AspNetCore.Http; @@ -65,18 +67,17 @@ public static LocalFile GenerateLocalFileWithData(LocalFile file, int sizeInByte public static IFormFile GenerateFormFile(string fileName, int byteSize) { var localFile = GenerateLocalFile(fileName, byteSize); + var contentType = MimeHelper.GetMimeType(localFile.FileInfo.Extension); - var ms 
= new MemoryStream(); - localFile.FileStream.CopyTo(ms); - var formFile = new FormFile(ms, 0, ms.Length, fileName, fileName) + byte[] bytes; + using (localFile) { - Headers = new HeaderDictionary(), - ContentType = MimeHelper.GetMimeType(localFile.FileInfo.Extension) - }; - - localFile.Dispose(); + using var ms = new MemoryStream(); + localFile.FileStream.CopyTo(ms); + bytes = ms.ToArray(); + } - return formFile; + return new InMemoryFormFile(bytes, fileName, fileName, contentType); } public static string GenerateRandomFileName() @@ -94,4 +95,47 @@ public static string GenerateRandomFileContent(int charCount = 250_000) .Select(s => s[Random.Next(s.Length)]) .ToArray()); } + + private sealed class InMemoryFormFile : IFormFile + { + private readonly byte[] _content; + + public InMemoryFormFile(byte[] content, string name, string fileName, string contentType) + { + _content = content; + Name = name; + FileName = fileName; + ContentType = contentType; + Headers = new HeaderDictionary + { + { "Content-Type", contentType } + }; + ContentDisposition = $"form-data; name=\"{name}\"; filename=\"{fileName}\""; + } + + public string ContentType { get; } + public string ContentDisposition { get; } + public IHeaderDictionary Headers { get; } + public long Length => _content.Length; + public string Name { get; } + public string FileName { get; } + + public Stream OpenReadStream() => new MemoryStream(_content, writable: false); + + public void CopyTo(Stream target) + { + ArgumentNullException.ThrowIfNull(target); + + using var stream = OpenReadStream(); + stream.CopyTo(target); + } + + public async Task CopyToAsync(Stream target, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(target); + + await using var stream = OpenReadStream(); + await stream.CopyToAsync(target, cancellationToken); + } + } } diff --git a/Tests/ManagedCode.Storage.Tests/Core/StorageClientChunkTests.cs b/Tests/ManagedCode.Storage.Tests/Core/StorageClientChunkTests.cs index 
3473cd4..e6ee8f7 100644 --- a/Tests/ManagedCode.Storage.Tests/Core/StorageClientChunkTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Core/StorageClientChunkTests.cs @@ -58,7 +58,8 @@ public async Task UploadLargeFile_WhenServerReturnsObject_ShouldParseChecksum() double? finalProgress = null; var progressEvents = new List(); - var result = await client.UploadLargeFile(new MemoryStream(payload, writable: false), UploadUrl, CompleteUrl, progress => + using var payloadStream = new MemoryStream(payload, writable: false); + var result = await client.UploadLargeFile(payloadStream, UploadUrl, CompleteUrl, progress => { progressEvents.Add(progress); finalProgress = progress; @@ -102,7 +103,8 @@ public async Task UploadLargeFile_WhenServerReturnsNumber_ShouldParseChecksum() var client = new StorageClient(httpClient); client.SetChunkSize(256 * 1024); - var result = await client.UploadLargeFile(new MemoryStream(payload, writable: false), UploadUrl, CompleteUrl, null); + using var payloadStream = new MemoryStream(payload, writable: false); + var result = await client.UploadLargeFile(payloadStream, UploadUrl, CompleteUrl, null); result.IsSuccess.ShouldBeTrue(); result.Value.ShouldBe(expectedChecksum); @@ -141,7 +143,8 @@ public async Task UploadLargeFile_WhenServerReturnsStringChecksum_ShouldParseChe ChunkSize = 128 * 1024 }; - var result = await client.UploadLargeFile(new MemoryStream(payload, writable: false), UploadUrl, CompleteUrl, null); + using var payloadStream = new MemoryStream(payload, writable: false); + var result = await client.UploadLargeFile(payloadStream, UploadUrl, CompleteUrl, null); result.IsSuccess.ShouldBeTrue(); result.Value.ShouldBe(expectedChecksum); } @@ -173,7 +176,8 @@ public async Task UploadLargeFile_WhenValueMissing_ShouldFail() ChunkSize = 64 * 1024 }; - var result = await client.UploadLargeFile(new MemoryStream(payload, writable: false), UploadUrl, CompleteUrl, null); + using var payloadStream = new MemoryStream(payload, writable: false); + var 
result = await client.UploadLargeFile(payloadStream, UploadUrl, CompleteUrl, null); result.IsSuccess.ShouldBeFalse(); } @@ -183,7 +187,7 @@ public async Task UploadLargeFile_WhenChunkSizeMissing_ShouldThrow() using var httpClient = new HttpClient(new RecordingHandler(_ => Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)))); var client = new StorageClient(httpClient); - Func act = () => client.UploadLargeFile(new MemoryStream(new byte[1]), UploadUrl, CompleteUrl, null); + Func act = () => client.UploadLargeFile(Stream.Null, UploadUrl, CompleteUrl, null); await Should.ThrowAsync(act); } @@ -216,7 +220,8 @@ public async Task UploadLargeFile_WhenServerReturnsZero_ShouldUseComputedChecksu ChunkSize = 128 * 1024 }; - var result = await client.UploadLargeFile(new MemoryStream(payload, writable: false), UploadUrl, CompleteUrl, null); + using var payloadStream = new MemoryStream(payload, writable: false); + var result = await client.UploadLargeFile(payloadStream, UploadUrl, CompleteUrl, null); result.IsSuccess.ShouldBeTrue(); result.Value.ShouldBe(expectedChecksum); } diff --git a/Tests/ManagedCode.Storage.Tests/Server/ChunkUploadServiceTests.cs b/Tests/ManagedCode.Storage.Tests/Server/ChunkUploadServiceTests.cs index e5117ec..1d854ff 100644 --- a/Tests/ManagedCode.Storage.Tests/Server/ChunkUploadServiceTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Server/ChunkUploadServiceTests.cs @@ -1,5 +1,6 @@ using System; using System.IO; +using System.Threading; using System.Threading.Tasks; using Shouldly; using ManagedCode.Storage.Core.Helpers; @@ -57,18 +58,18 @@ public async Task CompleteAsync_WithCommit_ShouldMergeChunksAndUpload() var chunkSize = 2048; var totalChunks = (int)Math.Ceiling(payload.Length / (double)chunkSize); - for (var i = 0; i < totalChunks; i++) - { - var sliceLength = Math.Min(chunkSize, payload.Length - (i * chunkSize)); - var slice = new byte[sliceLength]; - Array.Copy(payload, i * chunkSize, slice, 0, sliceLength); + for (var i = 0; i < 
totalChunks; i++) + { + var sliceLength = Math.Min(chunkSize, payload.Length - (i * chunkSize)); + var slice = new byte[sliceLength]; + Array.Copy(payload, i * chunkSize, slice, 0, sliceLength); - var formFile = CreateFormFile(slice, fileName); + using var formFile = CreateFormFile(slice, fileName); - var appendResult = await service.AppendChunkAsync(new FileUploadPayload - { - File = formFile, - Payload = new FilePayload + var appendResult = await service.AppendChunkAsync(new FileUploadPayload + { + File = formFile, + Payload = new FilePayload { UploadId = uploadId, FileName = fileName, @@ -113,15 +114,15 @@ public async Task CompleteAsync_WithCommit_ShouldMergeChunksAndUpload() [Fact] public async Task Abort_ShouldRemoveSessionArtifacts() { - var service = new ChunkUploadService(_options); - var uploadId = Guid.NewGuid().ToString("N"); - var fileName = "artifact.bin"; - var chunkBytes = new byte[] { 1, 2, 3, 4 }; - var formFile = CreateFormFile(chunkBytes, fileName); - - var append = await service.AppendChunkAsync(new FileUploadPayload - { - File = formFile, + var service = new ChunkUploadService(_options); + var uploadId = Guid.NewGuid().ToString("N"); + var fileName = "artifact.bin"; + var chunkBytes = new byte[] { 1, 2, 3, 4 }; + using var formFile = CreateFormFile(chunkBytes, fileName); + + var append = await service.AppendChunkAsync(new FileUploadPayload + { + File = formFile, Payload = new FilePayload { UploadId = uploadId, @@ -152,13 +153,13 @@ public async Task AppendChunk_WhenSessionLimitExceeded_ShouldFail() var service = new ChunkUploadService(options); - async Task Append(string uploadId) - { - var formFile = CreateFormFile(new byte[] { 1 }, "chunk.bin"); - var result = await service.AppendChunkAsync(new FileUploadPayload - { - File = formFile, - Payload = new FilePayload + async Task Append(string uploadId) + { + using var formFile = CreateFormFile(new byte[] { 1 }, "chunk.bin"); + var result = await service.AppendChunkAsync(new FileUploadPayload + { 
+ File = formFile, + Payload = new FilePayload { UploadId = uploadId, FileName = "chunk.bin", @@ -188,15 +189,15 @@ public async Task CompleteAsync_WithLargeChunkSize_ShouldPreserveChecksum() var uploadId = Guid.NewGuid().ToString("N"); var fileName = "single-chunk.bin"; - var payload = new byte[51]; - new Random(123).NextBytes(payload); - var checksum = Crc32Helper.Calculate(payload); + var payload = new byte[51]; + new Random(123).NextBytes(payload); + var checksum = Crc32Helper.Calculate(payload); - var formFile = CreateFormFile(payload, fileName); + using var formFile = CreateFormFile(payload, fileName); - var appendResult = await service.AppendChunkAsync(new FileUploadPayload - { - File = formFile, + var appendResult = await service.AppendChunkAsync(new FileUploadPayload + { + File = formFile, Payload = new FilePayload { UploadId = uploadId, @@ -229,20 +230,46 @@ private FileSystemStorage CreateStorage() { BaseFolder = baseFolder, CreateContainerIfNotExists = true - }); - } - - private static FormFile CreateFormFile(byte[] bytes, string fileName) - { - var stream = new MemoryStream(bytes); - var formFile = new FormFile(stream, 0, bytes.Length, "File", fileName) - { - Headers = new HeaderDictionary - { - { "Content-Type", new StringValues("application/octet-stream") } - } - }; - formFile.ContentType = "application/octet-stream"; - return formFile; - } -} + }); + } + + private static DisposableFormFile CreateFormFile(byte[] bytes, string fileName) + { + return new DisposableFormFile(bytes, fileName); + } + + private sealed class DisposableFormFile : IFormFile, IDisposable + { + private readonly MemoryStream _stream; + private readonly FormFile _inner; + + public DisposableFormFile(byte[] bytes, string fileName) + { + _stream = new MemoryStream(bytes, writable: false); + _inner = new FormFile(_stream, 0, bytes.Length, "File", fileName) + { + Headers = new HeaderDictionary + { + { "Content-Type", new StringValues("application/octet-stream") } + } + }; + 
_inner.ContentType = "application/octet-stream"; + } + + public string ContentType => _inner.ContentType; + public string ContentDisposition => _inner.ContentDisposition; + public IHeaderDictionary Headers => _inner.Headers; + public long Length => _inner.Length; + public string Name => _inner.Name; + public string FileName => _inner.FileName; + + public Stream OpenReadStream() => _inner.OpenReadStream(); + + public void CopyTo(Stream target) => _inner.CopyTo(target); + + public Task CopyToAsync(Stream target, CancellationToken cancellationToken = default) => + _inner.CopyToAsync(target, cancellationToken); + + public void Dispose() => _stream.Dispose(); + } + } diff --git a/Tests/ManagedCode.Storage.Tests/Storages/AWS/AwsContainerFactory.cs b/Tests/ManagedCode.Storage.Tests/Storages/AWS/AwsContainerFactory.cs index c9c09da..133b9db 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/AWS/AwsContainerFactory.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/AWS/AwsContainerFactory.cs @@ -1,3 +1,4 @@ +using System; using System.Net; using DotNet.Testcontainers.Builders; using ManagedCode.Storage.Tests.Common; @@ -18,7 +19,8 @@ public static LocalStackContainer Create() .UntilHttpRequestIsSucceeded(request => request .ForPort(EdgePort) .ForPath("/_localstack/health") - .ForStatusCode(HttpStatusCode.OK))) + .ForStatusCode(HttpStatusCode.OK), + wait => wait.WithTimeout(TimeSpan.FromMinutes(5)))) .Build(); } } diff --git a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/StorageClientTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/StorageClientTests.cs index 76827a9..f793020 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/StorageClientTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/StorageClientTests.cs @@ -22,21 +22,15 @@ public StorageClientTests() { _httpClient = new HttpClient(new FakeHttpMessageHandler(request => { - var response = new HttpResponseMessage(HttpStatusCode.OK); if (request.Method == HttpMethod.Get && 
request.RequestUri?.AbsoluteUri.Contains("loader.com", StringComparison.Ordinal) == true) { - var contentStream = new MemoryStream(); - using (var writer = new StreamWriter(contentStream)) + return new HttpResponseMessage(HttpStatusCode.OK) { - writer.Write("Test content"); - writer.Flush(); - contentStream.Position = 0; - } - - response.Content = new StreamContent(contentStream); + Content = new StringContent("Test content") + }; } - return response; + return new HttpResponseMessage(HttpStatusCode.OK); })); _storageClient = new StorageClient(_httpClient); diff --git a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs index c0ce567..b710b98 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/Abstracts/UploadTests.cs @@ -36,15 +36,15 @@ public async Task UploadAsync_AsText_WithoutOptions() } [Fact] - public async Task UploadAsync_AsStream_WithoutOptions() - { - // Arrange - var uploadContent = FileHelper.GenerateRandomFileContent(); - var byteArray = Encoding.ASCII.GetBytes(uploadContent); - var stream = new MemoryStream(byteArray); + public async Task UploadAsync_AsStream_WithoutOptions() + { + // Arrange + var uploadContent = FileHelper.GenerateRandomFileContent(); + var byteArray = Encoding.ASCII.GetBytes(uploadContent); + using var stream = new MemoryStream(byteArray); - // Act - var result = await Storage.UploadAsync(stream); + // Act + var result = await Storage.UploadAsync(stream); // Assert result.IsSuccess @@ -56,13 +56,14 @@ public async Task UploadAsync_AsStream_WithoutOptions() } [Fact] - public async Task StreamUploadAsyncTest() - { - var file = await GetTestFileAsync(); - var uploadResult = await Storage.UploadAsync(file.OpenRead()); - uploadResult.IsSuccess - .ShouldBeTrue(); - } + public async Task StreamUploadAsyncTest() + { + var file = await GetTestFileAsync(); + await using var stream = 
file.OpenRead(); + var uploadResult = await Storage.UploadAsync(stream); + uploadResult.IsSuccess + .ShouldBeTrue(); + } [Fact] public async Task ArrayUploadAsyncTest() @@ -103,14 +104,14 @@ public async Task UploadAsync_AsStream_WithOptions_ToDirectory_SpecifyingFileNam // Arrange var directory = "test-directory"; var uploadContent = FileHelper.GenerateRandomFileContent(); - var fileName = FileHelper.GenerateRandomFileName(); + var fileName = FileHelper.GenerateRandomFileName(); - var byteArray = Encoding.ASCII.GetBytes(uploadContent); - var stream = new MemoryStream(byteArray); + var byteArray = Encoding.ASCII.GetBytes(uploadContent); + using var stream = new MemoryStream(byteArray); - // Act - var result = await Storage.UploadAsync(stream, new UploadOptions { FileName = fileName, Directory = directory }); - var downloadedResult = await Storage.DownloadAsync(new DownloadOptions { FileName = fileName, Directory = directory }); + // Act + var result = await Storage.UploadAsync(stream, new UploadOptions { FileName = fileName, Directory = directory }); + var downloadedResult = await Storage.DownloadAsync(new DownloadOptions { FileName = fileName, Directory = directory }); // Assert result.IsSuccess @@ -242,12 +243,12 @@ public virtual async Task UploadAsync_LargeStream_ShouldRoundTrip(int gigabytes) [Fact] public async Task UploadAsync_WithCancellationToken_ShouldCancel() { - // Arrange - var uploadContent = FileHelper.GenerateRandomFileContent(); - var byteArray = Encoding.ASCII.GetBytes(uploadContent); - var stream = new MemoryStream(byteArray); - var cts = new CancellationTokenSource(); - cts.Cancel(); + // Arrange + var uploadContent = FileHelper.GenerateRandomFileContent(); + var byteArray = Encoding.ASCII.GetBytes(uploadContent); + using var stream = new MemoryStream(byteArray); + var cts = new CancellationTokenSource(); + cts.Cancel(); // Act var result = await Storage.UploadAsync(stream, cancellationToken: cts.Token); @@ -261,11 +262,11 @@ public async Task 
UploadAsync_WithCancellationToken_ShouldCancel() [Fact] public virtual async Task UploadAsync_WithCancellationToken_BigFile_ShouldCancel() { - // Arrange - var uploadContent = FileHelper.GenerateRandomFileContent((Storage is FileSystemStorage) ? 100_0000_000 : 10_0000_000); - var byteArray = Encoding.ASCII.GetBytes(uploadContent); - var stream = new MemoryStream(byteArray); - var cts = new CancellationTokenSource(); + // Arrange + var uploadContent = FileHelper.GenerateRandomFileContent((Storage is FileSystemStorage) ? 100_0000_000 : 10_0000_000); + var byteArray = Encoding.ASCII.GetBytes(uploadContent); + using var stream = new MemoryStream(byteArray); + var cts = new CancellationTokenSource(); // Act var cancellationTask = Task.Run(() => diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs index 879ee84..9224816 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/DropboxClientWrapperHttpTests.cs @@ -225,22 +225,22 @@ private async Task HandleContentAsync(HttpRequestMessage re return JsonResponse(ToMetadata(entry)); } - if (path.Equals("/2/files/download", StringComparison.OrdinalIgnoreCase)) - { - if (!_entries.TryGetValue(normalizedLower, out var entry) || entry.IsFolder) - { - return PathNotFoundError(); - } - - var response = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(entry.Content) - }; - - response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream"); - response.Headers.Add("Dropbox-API-Result", JsonSerializer.Serialize(ToMetadata(entry))); - return response; - } + if (path.Equals("/2/files/download", StringComparison.OrdinalIgnoreCase)) + { + if (!_entries.TryGetValue(normalizedLower, out var entry) || entry.IsFolder) + { + return 
PathNotFoundError(); + } + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(entry.Content) + { + Headers = { ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream") } + }, + Headers = { { "Dropbox-API-Result", JsonSerializer.Serialize(ToMetadata(entry)) } } + }; + } return new HttpResponseMessage(HttpStatusCode.NotFound); } @@ -379,15 +379,14 @@ private static string ParentPath(string path) }; } - private static HttpResponseMessage JsonResponse(object payload, HttpStatusCode statusCode = HttpStatusCode.OK) - { - var response = new HttpResponseMessage(statusCode) - { - Content = new StringContent(JsonSerializer.Serialize(payload)) - }; - response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); - return response; - } + private static HttpResponseMessage JsonResponse(object payload, HttpStatusCode statusCode = HttpStatusCode.OK) + { + var json = JsonSerializer.Serialize(payload); + return new HttpResponseMessage(statusCode) + { + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + } private static HttpResponseMessage PathNotFoundError() { diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs index bb9afe6..2d7b793 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GoogleDriveClientHttpTests.cs @@ -132,26 +132,27 @@ protected override async Task SendAsync(HttpRequestMessage return JsonResponse(ToResponse(created)); } - if (request.Method == HttpMethod.Post - && path.Equals("/upload/drive/v3/files", StringComparison.OrdinalIgnoreCase) - && query.TryGetValue("uploadType", out var uploadType) - && string.Equals(uploadType, "resumable", StringComparison.OrdinalIgnoreCase)) + if (request.Method == 
HttpMethod.Post + && path.Equals("/upload/drive/v3/files", StringComparison.OrdinalIgnoreCase) + && query.TryGetValue("uploadType", out var uploadType) + && string.Equals(uploadType, "resumable", StringComparison.OrdinalIgnoreCase)) { var body = await request.Content!.ReadAsStringAsync(cancellationToken); var model = JsonSerializer.Deserialize(body, new JsonSerializerOptions { PropertyNameCaseInsensitive = true }) ?? throw new InvalidOperationException("Upload initiation body is missing."); var uploadId = "upload-" + Interlocked.Increment(ref _counter); - _pendingUploads[uploadId] = new PendingUpload( - Name: model.Name ?? Guid.NewGuid().ToString("N"), - ParentId: model.Parents?.FirstOrDefault() ?? RootFolderId, - MimeType: model.MimeType ?? "application/octet-stream"); - - var response = new HttpResponseMessage(HttpStatusCode.OK); - response.Headers.Location = new Uri($"https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&upload_id={uploadId}"); - response.Content = new ByteArrayContent(Array.Empty()); - return response; - } + _pendingUploads[uploadId] = new PendingUpload( + Name: model.Name ?? Guid.NewGuid().ToString("N"), + ParentId: model.Parents?.FirstOrDefault() ?? RootFolderId, + MimeType: model.MimeType ?? 
"application/octet-stream"); + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Headers = { Location = new Uri($"https://www.googleapis.com/upload/drive/v3/files?uploadType=resumable&upload_id={uploadId}") }, + Content = new ByteArrayContent(Array.Empty()) + }; + } if (request.Method == HttpMethod.Put && path.Equals("/upload/drive/v3/files", StringComparison.OrdinalIgnoreCase) @@ -179,20 +180,21 @@ protected override async Task SendAsync(HttpRequestMessage return new HttpResponseMessage(HttpStatusCode.NoContent); } - if (request.Method == HttpMethod.Get && query.TryGetValue("alt", out var alt) && string.Equals(alt, "media", StringComparison.OrdinalIgnoreCase)) - { - if (!_entriesById.TryGetValue(fileId, out var entry) || entry.MimeType == FolderMimeType) - { - return new HttpResponseMessage(HttpStatusCode.NotFound); - } - - var response = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(entry.Content) - }; - response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(entry.MimeType); - return response; - } + if (request.Method == HttpMethod.Get && query.TryGetValue("alt", out var alt) && string.Equals(alt, "media", StringComparison.OrdinalIgnoreCase)) + { + if (!_entriesById.TryGetValue(fileId, out var entry) || entry.MimeType == FolderMimeType) + { + return new HttpResponseMessage(HttpStatusCode.NotFound); + } + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(entry.Content) + { + Headers = { ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(entry.MimeType) } + } + }; + } if (request.Method == HttpMethod.Get) { @@ -297,19 +299,22 @@ private static Dictionary ParseQuery(string? 
query) return result; } - private HttpResponseMessage JsonResponse(object payload, HttpStatusCode statusCode = HttpStatusCode.OK) - { - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); - var bytes = Encoding.UTF8.GetBytes(json); - var response = new HttpResponseMessage(statusCode) - { - Content = new ByteArrayContent(bytes) - }; - - response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); - response.Content.Headers.ContentLength = bytes.LongLength; - return response; - } + private HttpResponseMessage JsonResponse(object payload, HttpStatusCode statusCode = HttpStatusCode.OK) + { + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); + var bytes = Encoding.UTF8.GetBytes(json); + return new HttpResponseMessage(statusCode) + { + Content = new ByteArrayContent(bytes) + { + Headers = + { + ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"), + ContentLength = bytes.LongLength + } + } + }; + } private sealed record Entry(string Id, string Name, string ParentId, string MimeType, byte[] Content, DateTimeOffset Created, DateTimeOffset Modified); diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs index 1cad420..7da7bc3 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs @@ -307,16 +307,14 @@ private static bool TryGetItemPath(Uri requestUri, out string path, out bool isC return true; } - private static HttpResponseMessage JsonResponse(object content, HttpStatusCode status = HttpStatusCode.OK) - { - var response = new HttpResponseMessage(status) - { - Content = new 
StringContent(JsonSerializer.Serialize(content, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase })) - }; - - response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); - return response; - } + private static HttpResponseMessage JsonResponse(object content, HttpStatusCode status = HttpStatusCode.OK) + { + var json = JsonSerializer.Serialize(content, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); + return new HttpResponseMessage(status) + { + Content = new StringContent(json, System.Text.Encoding.UTF8, "application/json") + }; + } private bool TryHandleItemRequest(HttpRequestMessage request, out HttpResponseMessage response) { diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs index 9b47558..d7cbdf1 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudKit/FakeCloudKitHttpHandler.cs @@ -259,13 +259,10 @@ private static object ToRecordResponse(StoredRecord record) private static HttpResponseMessage JsonResponse(object payload, HttpStatusCode statusCode = HttpStatusCode.OK) { var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions { PropertyNamingPolicy = JsonNamingPolicy.CamelCase }); - var bytes = Encoding.UTF8.GetBytes(json); - var response = new HttpResponseMessage(statusCode) + return new HttpResponseMessage(statusCode) { - Content = new ByteArrayContent(bytes) + Content = new StringContent(json, Encoding.UTF8, "application/json") }; - response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"); - return response; } private static HttpResponseMessage JsonResponseWithToken(Dictionary payload, string? 
webAuthToken, HttpStatusCode statusCode = HttpStatusCode.OK) diff --git a/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemSecurityTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemSecurityTests.cs index c800ffa..cbb6e3b 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemSecurityTests.cs +++ b/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemSecurityTests.cs @@ -36,32 +36,33 @@ public FileSystemSecurityTests() [InlineData("..\\..\\..\\Windows\\System32\\config\\SAM")] [InlineData("../../../../secret.txt")] [InlineData("..\\..\\sensitive.dat")] - public async Task UploadAsync_WithPathTraversal_ShouldFail(string maliciousFileName) - { - // Arrange - var stream = new MemoryStream(new byte[] { 1, 2, 3 }); - var options = new UploadOptions - { - FileName = maliciousFileName - }; + public async Task UploadAsync_WithPathTraversal_ShouldFail(string maliciousFileName) + { + // Arrange + using var stream = new MemoryStream(new byte[] { 1, 2, 3 }); + var options = new UploadOptions + { + FileName = maliciousFileName + }; // Act var result = await _storage.UploadAsync(stream, options); - // Assert - security validation should reject path traversal - result.IsFailed.ShouldBeTrue(); - result.Problem.Title.ShouldBe("UnauthorizedAccessException"); - } + // Assert - security validation should reject path traversal + result.IsFailed.ShouldBeTrue(); + result.Problem.ShouldNotBeNull(); + result.Problem.Title.ShouldBe("UnauthorizedAccessException"); + } [Fact] - public async Task UploadAsync_WithValidFileName_ShouldSucceed() - { - // Arrange - var stream = new MemoryStream(new byte[] { 1, 2, 3 }); - var options = new UploadOptions - { - FileName = "legitimate-file.txt" - }; + public async Task UploadAsync_WithValidFileName_ShouldSucceed() + { + // Arrange + using var stream = new MemoryStream(new byte[] { 1, 2, 3 }); + var options = new UploadOptions + { + FileName = "legitimate-file.txt" + }; // Act var result = 
await _storage.UploadAsync(stream, options); @@ -74,34 +75,35 @@ public async Task UploadAsync_WithValidFileName_ShouldSucceed() [Theory] [InlineData("../../../malicious")] [InlineData("../../outside")] - public async Task UploadAsync_WithPathTraversalInDirectory_ShouldFail( - string maliciousDirectory) - { - // Arrange - var stream = new MemoryStream(new byte[] { 1, 2, 3 }); - var options = new UploadOptions - { - FileName = "file.txt", - Directory = maliciousDirectory - }; + public async Task UploadAsync_WithPathTraversalInDirectory_ShouldFail( + string maliciousDirectory) + { + // Arrange + using var stream = new MemoryStream(new byte[] { 1, 2, 3 }); + var options = new UploadOptions + { + FileName = "file.txt", + Directory = maliciousDirectory + }; // Act var result = await _storage.UploadAsync(stream, options); - // Assert - security validation should reject path traversal - result.IsFailed.ShouldBeTrue(); - result.Problem.Title.ShouldBe("UnauthorizedAccessException"); - } + // Assert - security validation should reject path traversal + result.IsFailed.ShouldBeTrue(); + result.Problem.ShouldNotBeNull(); + result.Problem.Title.ShouldBe("UnauthorizedAccessException"); + } [Fact] - public async Task UploadAsync_WithValidDirectory_ShouldSucceed() - { - // Arrange - var stream = new MemoryStream(new byte[] { 1, 2, 3 }); - var options = new UploadOptions - { - FileName = "file.txt", - Directory = "subfolder/nested" + public async Task UploadAsync_WithValidDirectory_ShouldSucceed() + { + // Arrange + using var stream = new MemoryStream(new byte[] { 1, 2, 3 }); + var options = new UploadOptions + { + FileName = "file.txt", + Directory = "subfolder/nested" }; // Act diff --git a/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs index d89529e..fd50881 100644 --- a/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs +++ 
b/Tests/ManagedCode.Storage.Tests/Storages/FileSystem/FileSystemUploadTests.cs @@ -23,20 +23,20 @@ protected override ServiceProvider ConfigureServices() } [Fact] - public async Task UploadAsync_AsStream_CorrectlyOverwritesFiles() - { - // Arrange + public async Task UploadAsync_AsStream_CorrectlyOverwritesFiles() + { + // Arrange - var uploadStream1 = new MemoryStream(90 * 1024); - var buffer = new byte[90 * 1024]; - var random = new Random(); - random.NextBytes(buffer); - uploadStream1.Write(buffer, 0, buffer.Length); + using var uploadStream1 = new MemoryStream(90 * 1024); + var buffer = new byte[90 * 1024]; + var random = new Random(); + random.NextBytes(buffer); + uploadStream1.Write(buffer, 0, buffer.Length); - var uploadStream2 = new MemoryStream(512); - var zeroByteBuffer = new byte[512]; - uploadStream2.Write(zeroByteBuffer); - var filenameToUse = "UploadAsync_AsStream_CorrectlyOverwritesFiles.bin"; + using var uploadStream2 = new MemoryStream(512); + var zeroByteBuffer = new byte[512]; + uploadStream2.Write(zeroByteBuffer); + var filenameToUse = "UploadAsync_AsStream_CorrectlyOverwritesFiles.bin"; var temporaryDirectory = Environment.CurrentDirectory;