diff --git a/ManagedCode.Storage.slnx b/ManagedCode.Storage.slnx
index f431447..ea3ca1a 100644
--- a/ManagedCode.Storage.slnx
+++ b/ManagedCode.Storage.slnx
@@ -16,6 +16,9 @@
+
+
+
diff --git a/README.md b/README.md
index 3cbc2a3..a614d0e 100644
--- a/README.md
+++ b/README.md
@@ -52,6 +52,66 @@ Cloud storage vendors expose distinct SDKs, option models, and authentication pa
| [ManagedCode.Storage.FileSystem](https://www.nuget.org/packages/ManagedCode.Storage.FileSystem) | [](https://www.nuget.org/packages/ManagedCode.Storage.FileSystem) | Local file system implementation for hybrid or on-premises workloads. |
| [ManagedCode.Storage.Sftp](https://www.nuget.org/packages/ManagedCode.Storage.Sftp) | [](https://www.nuget.org/packages/ManagedCode.Storage.Sftp) | SFTP provider powered by SSH.NET for legacy and air-gapped environments. |
+### Configuring OneDrive, Google Drive, and Dropbox
+
+> iCloud does not expose a public file API suitable for server-side integrations, so only Microsoft, Google, and Dropbox cloud drives are covered here.
+
+**OneDrive / Microsoft Graph**
+
+1. Create an app registration in Azure Active Directory (Entra ID) and record the **Application (client) ID**, **Directory (tenant) ID**, and a **client secret**.
+2. Add the Microsoft Graph **Files.ReadWrite.All** application permission (or **Sites.ReadWrite.All** if you target SharePoint drives) and grant admin consent — the client-credentials flow used in step 3 requires application permissions, not delegated ones.
+3. In your ASP.NET app, acquire a token via `ClientSecretCredential` or another `TokenCredential` and pass it to `new GraphServiceClient(credential, new[] { "https://graph.microsoft.com/.default" })`.
+4. Register OneDrive storage with the Graph client and the drive/root you want to scope to:
+
+ ```csharp
+ builder.Services.AddOneDriveStorageAsDefault(options =>
+ {
+ options.GraphClient = graphClient; // from step 3
+ options.DriveId = "me"; // or a specific drive ID
+ options.RootPath = "app-data"; // folder will be created when CreateContainerIfNotExists is true
+ options.CreateContainerIfNotExists = true;
+ });
+ ```
+
+5. To target a specific drive instead of the signed-in context, replace the `DriveId` value with the drive ID returned by Graph (for example from `GET /users/{id}/drive`).
+
+**Google Drive**
+
+1. In [Google Cloud Console](https://console.cloud.google.com/), create a project and enable the **Google Drive API**.
+2. Configure an OAuth consent screen and create an **OAuth 2.0 Client ID** (Desktop or Web). Record the client ID and secret.
+3. Exchange the OAuth code for a refresh token with the `https://www.googleapis.com/auth/drive.file` scope (or broader if necessary).
+4. Add the Google Drive provider and feed the credentials to the options:
+
+ ```csharp
+ builder.Services.AddGoogleDriveStorage(options =>
+ {
+ options.ClientId = configuration["GoogleDrive:ClientId"]!;
+ options.ClientSecret = configuration["GoogleDrive:ClientSecret"]!;
+ options.RefreshToken = configuration["GoogleDrive:RefreshToken"]!;
+ options.RootFolderId = "root"; // or a shared drive folder id
+ });
+ ```
+
+5. Store tokens in user secrets or environment variables; never commit them to source control.
+
+**Dropbox**
+
+1. Create an app in the [Dropbox App Console](https://www.dropbox.com/developers/apps) and choose **Scoped access** with the **Full Dropbox** or **App folder** type.
+2. Under **Permissions**, enable `files.content.write`, `files.content.read`, and `files.metadata.write` and generate a refresh token via OAuth.
+3. Register Dropbox storage with the access credentials and a root path (use `/` or a folder path for Full Dropbox apps; App folder apps are automatically scoped to their app folder, so use paths relative to it rather than `/Apps/...`):
+
+ ```csharp
+ builder.Services.AddDropboxStorage(options =>
+ {
+ options.AppKey = configuration["Dropbox:AppKey"]!;
+ options.AppSecret = configuration["Dropbox:AppSecret"]!;
+ options.RefreshToken = configuration["Dropbox:RefreshToken"]!;
+ options.RootPath = "/apps/my-app";
+ });
+ ```
+
+4. Dropbox issues short-lived access tokens from refresh tokens; the SDK handles the exchange automatically once configured.
+
### ASP.NET & Clients
| Package | Latest | Description |
diff --git a/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs
new file mode 100644
index 0000000..0bcab55
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxClientWrapper.cs
@@ -0,0 +1,154 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Dropbox.Api;
+using Dropbox.Api.Files;
+
+namespace ManagedCode.Storage.Dropbox.Clients;
+
+public class DropboxClientWrapper : IDropboxClientWrapper
+{
+ private readonly DropboxClient _client;
+
+ public DropboxClientWrapper(DropboxClient client)
+ {
+ _client = client ?? throw new ArgumentNullException(nameof(client));
+ }
+
+ public async Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken)
+ {
+ if (string.IsNullOrWhiteSpace(rootPath))
+ {
+ return;
+ }
+
+ var normalized = Normalize(rootPath);
+ try
+ {
+ await _client.Files.GetMetadataAsync(normalized);
+ }
+ catch (ApiException ex) when (ex.ErrorResponse.IsPath && ex.ErrorResponse.AsPath.Value.IsNotFound)
+ {
+ if (!createIfNotExists)
+ {
+ return;
+ }
+
+ await _client.Files.CreateFolderV2Async(normalized, autorename: false);
+ }
+ }
+
+ public async Task UploadAsync(string rootPath, string path, Stream content, string? contentType, CancellationToken cancellationToken)
+ {
+ var fullPath = Combine(rootPath, path);
+ var uploaded = await _client.Files.UploadAsync(fullPath, WriteMode.Overwrite.Instance, body: content);
+ var metadata = (await _client.Files.GetMetadataAsync(uploaded.PathLower)).AsFile;
+ return ToItem(metadata);
+ }
+
+ public async Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken)
+ {
+ var fullPath = Combine(rootPath, path);
+ var response = await _client.Files.DownloadAsync(fullPath);
+ return await response.GetContentAsStreamAsync();
+ }
+
+ public async Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken)
+ {
+ var fullPath = Combine(rootPath, path);
+ await _client.Files.DeleteV2Async(fullPath);
+ return true;
+ }
+
+ public async Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken)
+ {
+ var fullPath = Combine(rootPath, path);
+ try
+ {
+ await _client.Files.GetMetadataAsync(fullPath);
+ return true;
+ }
+ catch (ApiException ex) when (ex.ErrorResponse.IsPath && ex.ErrorResponse.AsPath.Value.IsNotFound)
+ {
+ return false;
+ }
+ }
+
+ public async Task GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken)
+ {
+ var fullPath = Combine(rootPath, path);
+ try
+ {
+ var metadata = await _client.Files.GetMetadataAsync(fullPath);
+ return metadata.IsFile ? ToItem(metadata.AsFile) : null;
+ }
+ catch (ApiException ex) when (ex.ErrorResponse.IsPath && ex.ErrorResponse.AsPath.Value.IsNotFound)
+ {
+ return null;
+ }
+ }
+
+ public async IAsyncEnumerable ListAsync(string rootPath, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ var fullPath = Combine(rootPath, directory ?? string.Empty);
+ var list = await _client.Files.ListFolderAsync(fullPath);
+ foreach (var item in list.Entries)
+ {
+ if (item.IsFile)
+ {
+ yield return ToItem(item.AsFile);
+ }
+ }
+
+ while (list.HasMore)
+ {
+ list = await _client.Files.ListFolderContinueAsync(list.Cursor);
+ foreach (var item in list.Entries)
+ {
+ if (item.IsFile)
+ {
+ yield return ToItem(item.AsFile);
+ }
+ }
+ }
+ }
+
+ private static DropboxItemMetadata ToItem(FileMetadata file)
+ {
+ return new DropboxItemMetadata
+ {
+ Name = file.Name,
+ Path = file.PathLower ?? file.PathDisplay ?? string.Empty,
+ Size = file.Size,
+ ClientModified = file.ClientModified,
+ ServerModified = file.ServerModified
+ };
+ }
+
+ private static string Normalize(string path)
+ {
+ var normalized = path.Replace("\\", "/");
+ if (!normalized.StartsWith('/'))
+ {
+ normalized = "/" + normalized;
+ }
+
+ return normalized.TrimEnd('/') == string.Empty ? "/" : normalized.TrimEnd('/');
+ }
+
+ private static string Combine(string root, string path)
+ {
+ var normalizedRoot = Normalize(root);
+ var normalizedPath = path.Replace("\\", "/").Trim('/');
+ if (string.IsNullOrWhiteSpace(normalizedPath))
+ {
+ return normalizedRoot;
+ }
+
+ return normalizedRoot.EndsWith("/") ? normalizedRoot + normalizedPath : normalizedRoot + "/" + normalizedPath;
+ }
+}
diff --git a/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxItemMetadata.cs b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxItemMetadata.cs
new file mode 100644
index 0000000..0acfe36
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/Clients/DropboxItemMetadata.cs
@@ -0,0 +1,12 @@
+using System;
+
+namespace ManagedCode.Storage.Dropbox.Clients;
+
+public class DropboxItemMetadata
+{
+ public required string Name { get; set; }
+ public required string Path { get; set; }
+ public ulong Size { get; set; }
+ public DateTime ClientModified { get; set; }
+ public DateTime ServerModified { get; set; }
+}
diff --git a/Storages/ManagedCode.Storage.Dropbox/Clients/IDropboxClientWrapper.cs b/Storages/ManagedCode.Storage.Dropbox/Clients/IDropboxClientWrapper.cs
new file mode 100644
index 0000000..9027f96
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/Clients/IDropboxClientWrapper.cs
@@ -0,0 +1,24 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using Dropbox.Api.Files;
+
+namespace ManagedCode.Storage.Dropbox.Clients;
+
+public interface IDropboxClientWrapper
+{
+ Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken);
+
+ Task UploadAsync(string rootPath, string path, Stream content, string? contentType, CancellationToken cancellationToken);
+
+ Task DownloadAsync(string rootPath, string path, CancellationToken cancellationToken);
+
+ Task DeleteAsync(string rootPath, string path, CancellationToken cancellationToken);
+
+ Task ExistsAsync(string rootPath, string path, CancellationToken cancellationToken);
+
+ Task GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken);
+
+ IAsyncEnumerable ListAsync(string rootPath, string? directory, CancellationToken cancellationToken);
+}
diff --git a/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs b/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs
new file mode 100644
index 0000000..55db4e5
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/DropboxStorage.cs
@@ -0,0 +1,242 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using ManagedCode.Communication;
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.Core.Models;
+using ManagedCode.Storage.Dropbox.Clients;
+using ManagedCode.Storage.Dropbox.Options;
+using Microsoft.Extensions.Logging;
+
+namespace ManagedCode.Storage.Dropbox;
+
+public class DropboxStorage : BaseStorage, IDropboxStorage
+{
+ private readonly ILogger? _logger;
+
+ public DropboxStorage(DropboxStorageOptions storageOptions, ILogger? logger = null) : base(storageOptions)
+ {
+ _logger = logger;
+ }
+
+ protected override IDropboxClientWrapper CreateStorageClient()
+ {
+ if (StorageOptions.Client != null)
+ {
+ return StorageOptions.Client;
+ }
+
+ if (StorageOptions.DropboxClient != null)
+ {
+ return new DropboxClientWrapper(StorageOptions.DropboxClient);
+ }
+
+ throw new InvalidOperationException("Dropbox client is not configured for storage.");
+ }
+
+ protected override async Task CreateContainerInternalAsync(CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await StorageClient.EnsureRootAsync(StorageOptions.RootPath, StorageOptions.CreateContainerIfNotExists, cancellationToken);
+ IsContainerCreated = true;
+ return Result.Succeed();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogException(ex);
+ return Result.Fail(ex);
+ }
+ }
+
+ public override Task RemoveContainerAsync(CancellationToken cancellationToken = default)
+ {
+ // Dropbox API does not expose a direct container deletion concept; callers manage folders explicitly.
+ return Task.FromResult(Result.Succeed());
+ }
+
+ protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await EnsureContainerExist(cancellationToken);
+ var normalizedDirectory = NormalizeRelativePath(directory);
+
+ await foreach (var item in StorageClient.ListAsync(StorageOptions.RootPath, normalizedDirectory, cancellationToken))
+ {
+ var path = string.IsNullOrWhiteSpace(normalizedDirectory) ? item.Name : $"{normalizedDirectory}/{item.Name}";
+ await StorageClient.DeleteAsync(StorageOptions.RootPath, path!, cancellationToken);
+ }
+
+ return Result.Succeed();
+ }
+ catch (Exception ex)
+ {
+ _logger.LogException(ex);
+ return Result.Fail(ex);
+ }
+ }
+
+ protected override async Task> UploadInternalAsync(Stream stream, UploadOptions options, CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await EnsureContainerExist(cancellationToken);
+ var path = BuildFullPath(options.FullPath);
+ var uploaded = await StorageClient.UploadAsync(StorageOptions.RootPath, path, stream, options.MimeType, cancellationToken);
+ return Result.Succeed(ToBlobMetadata(uploaded, path));
+ }
+ catch (Exception ex)
+ {
+ _logger.LogException(ex);
+ return Result.Fail(ex);
+ }
+ }
+
+ protected override async Task> DownloadInternalAsync(LocalFile localFile, DownloadOptions options, CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await EnsureContainerExist(cancellationToken);
+ var path = BuildFullPath(options.FullPath);
+ var remoteStream = await StorageClient.DownloadAsync(StorageOptions.RootPath, path, cancellationToken);
+
+ await using (remoteStream)
+ await using (var fileStream = localFile.FileStream)
+ {
+ await remoteStream.CopyToAsync(fileStream, cancellationToken);
+ fileStream.Position = 0;
+ }
+
+ return Result.Succeed(localFile);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogException(ex);
+ return Result.Fail(ex);
+ }
+ }
+
+ protected override async Task> DeleteInternalAsync(DeleteOptions options, CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await EnsureContainerExist(cancellationToken);
+ var path = BuildFullPath(options.FullPath);
+ var deleted = await StorageClient.DeleteAsync(StorageOptions.RootPath, path, cancellationToken);
+ return Result.Succeed(deleted);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogException(ex);
+ return Result.Fail(ex);
+ }
+ }
+
+ protected override async Task> ExistsInternalAsync(ExistOptions options, CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await EnsureContainerExist(cancellationToken);
+ var path = BuildFullPath(options.FullPath);
+ var exists = await StorageClient.ExistsAsync(StorageOptions.RootPath, path, cancellationToken);
+ return Result.Succeed(exists);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogException(ex);
+ return Result.Fail(ex);
+ }
+ }
+
+ protected override async Task> GetBlobMetadataInternalAsync(MetadataOptions options, CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await EnsureContainerExist(cancellationToken);
+ var path = BuildFullPath(options.FullPath);
+ var item = await StorageClient.GetMetadataAsync(StorageOptions.RootPath, path, cancellationToken);
+ if (item == null)
+ {
+ return Result.Fail(new FileNotFoundException(path));
+ }
+
+ return Result.Succeed(ToBlobMetadata(item, path));
+ }
+ catch (Exception ex)
+ {
+ _logger.LogException(ex);
+ return Result.Fail(ex);
+ }
+ }
+
+ public override async IAsyncEnumerable GetBlobMetadataListAsync(string? directory = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+ {
+ await EnsureContainerExist(cancellationToken);
+ var normalizedDirectory = string.IsNullOrWhiteSpace(directory) ? null : NormalizeRelativePath(directory!);
+
+ await foreach (var item in StorageClient.ListAsync(StorageOptions.RootPath, normalizedDirectory, cancellationToken))
+ {
+ var fullPath = normalizedDirectory == null ? item.Name : $"{normalizedDirectory}/{item.Name}";
+ yield return ToBlobMetadata(item, fullPath);
+ }
+ }
+
+ public override async Task> GetStreamAsync(string fileName, CancellationToken cancellationToken = default)
+ {
+ try
+ {
+ await EnsureContainerExist(cancellationToken);
+ var path = BuildFullPath(fileName);
+ var stream = await StorageClient.DownloadAsync(StorageOptions.RootPath, path, cancellationToken);
+ return Result.Succeed(stream);
+ }
+ catch (Exception ex)
+ {
+ _logger.LogException(ex);
+ return Result.Fail(ex);
+ }
+ }
+
+ protected override Task SetLegalHoldInternalAsync(bool hasLegalHold, LegalHoldOptions options, CancellationToken cancellationToken = default)
+ {
+ return Task.FromResult(Result.Succeed());
+ }
+
+ protected override Task> HasLegalHoldInternalAsync(LegalHoldOptions options, CancellationToken cancellationToken = default)
+ {
+ return Task.FromResult(Result.Succeed(false));
+ }
+
+ private string BuildFullPath(string? relativePath)
+ {
+ var normalized = NormalizeRelativePath(relativePath ?? string.Empty);
+ return string.IsNullOrWhiteSpace(StorageOptions.RootPath)
+ ? normalized
+ : string.IsNullOrWhiteSpace(normalized) ? StorageOptions.RootPath.Trim('/') : $"{StorageOptions.RootPath.Trim('/')}/{normalized}";
+ }
+
+ private static string NormalizeRelativePath(string path)
+ {
+ return path.Replace("\\", "/").Trim('/');
+ }
+
+ private BlobMetadata ToBlobMetadata(DropboxItemMetadata file, string fullName)
+ {
+ return new BlobMetadata
+ {
+ Name = file.Name,
+ FullName = fullName,
+ Container = StorageOptions.RootPath,
+ Uri = new Uri($"https://www.dropbox.com/home/{file.Path.Trim('/')}", UriKind.RelativeOrAbsolute),
+ CreatedOn = file.ClientModified,
+ LastModified = file.ServerModified,
+ Length = file.Size,
+ MimeType = file.Name
+ };
+ }
+}
diff --git a/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs b/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs
new file mode 100644
index 0000000..d464877
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/DropboxStorageProvider.cs
@@ -0,0 +1,38 @@
+using System;
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.Core.Extensions;
+using ManagedCode.Storage.Core.Providers;
+using ManagedCode.Storage.Dropbox.Options;
+using Microsoft.Extensions.Logging;
+
+namespace ManagedCode.Storage.Dropbox;
+
+public class DropboxStorageProvider(IServiceProvider serviceProvider, DropboxStorageOptions defaultOptions) : IStorageProvider
+{
+ public Type StorageOptionsType => typeof(DropboxStorageOptions);
+
+ public TStorage CreateStorage(TOptions options)
+ where TStorage : class, IStorage
+ where TOptions : class, IStorageOptions
+ {
+ if (options is not DropboxStorageOptions dropboxOptions)
+ {
+ throw new ArgumentException($"Options must be of type {typeof(DropboxStorageOptions)}", nameof(options));
+ }
+
+ var logger = serviceProvider.GetService(typeof(ILogger)) as ILogger;
+ var storage = new DropboxStorage(dropboxOptions, logger);
+ return storage as TStorage ?? throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}");
+ }
+
+ public IStorageOptions GetDefaultOptions()
+ {
+ return new DropboxStorageOptions
+ {
+ RootPath = defaultOptions.RootPath,
+ DropboxClient = defaultOptions.DropboxClient,
+ Client = defaultOptions.Client,
+ CreateContainerIfNotExists = defaultOptions.CreateContainerIfNotExists
+ };
+ }
+}
diff --git a/Storages/ManagedCode.Storage.Dropbox/IDropboxStorage.cs b/Storages/ManagedCode.Storage.Dropbox/IDropboxStorage.cs
new file mode 100644
index 0000000..6edd08a
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/IDropboxStorage.cs
@@ -0,0 +1,9 @@
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.Dropbox.Clients;
+using ManagedCode.Storage.Dropbox.Options;
+
+namespace ManagedCode.Storage.Dropbox;
+
+public interface IDropboxStorage : IStorage
+{
+}
diff --git a/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj b/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj
new file mode 100644
index 0000000..349d40d
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/ManagedCode.Storage.Dropbox.csproj
@@ -0,0 +1,19 @@
+
+
+ true
+
+
+ ManagedCode.Storage.Dropbox
+ ManagedCode.Storage.Dropbox
+ Dropbox provider for ManagedCode.Storage.
+ managedcode, storage, dropbox
+
+
+
+
+
+
+
+
+
+
diff --git a/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs b/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs
new file mode 100644
index 0000000..0a504e7
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/Options/DropboxStorageOptions.cs
@@ -0,0 +1,16 @@
+using Dropbox.Api;
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.Dropbox.Clients;
+
+namespace ManagedCode.Storage.Dropbox.Options;
+
+public class DropboxStorageOptions : IStorageOptions
+{
+ public IDropboxClientWrapper? Client { get; set; }
+
+ public DropboxClient? DropboxClient { get; set; }
+
+ public string RootPath { get; set; } = string.Empty;
+
+ public bool CreateContainerIfNotExists { get; set; } = true;
+}
diff --git a/Storages/ManagedCode.Storage.Dropbox/PLAN.md b/Storages/ManagedCode.Storage.Dropbox/PLAN.md
new file mode 100644
index 0000000..dac26f5
--- /dev/null
+++ b/Storages/ManagedCode.Storage.Dropbox/PLAN.md
@@ -0,0 +1,8 @@
+# Dropbox integration plan
+
+- [x] Reference the official `Dropbox.Api` SDK and expose injection through `DropboxStorageOptions`.
+- [x] Implement `IDropboxClientWrapper` with a wrapper over `DropboxClient` that aligns with documented upload, download, list, and metadata APIs.
+- [x] Connect `DropboxStorage` to the shared abstractions and normalize path handling for custom root prefixes.
+- [ ] Add user guidance for creating an app in Dropbox, generating access tokens, and scoping permissions for file access.
+- [ ] Build mocks for `IDropboxClientWrapper` that mirror Dropbox metadata shapes so tests can validate uploads, downloads, and deletions without network calls.
+- [ ] Provide DI samples (keyed and default) so ASP.NET apps can register Dropbox storage with configuration-bound options.
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs b/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs
new file mode 100644
index 0000000..2dacdc4
--- /dev/null
+++ b/Storages/ManagedCode.Storage.GoogleDrive/Clients/GoogleDriveClient.cs
@@ -0,0 +1,161 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Google.Apis.Drive.v3;
+using DriveFile = Google.Apis.Drive.v3.Data.File;
+
+namespace ManagedCode.Storage.GoogleDrive.Clients;
+
+public class GoogleDriveClient : IGoogleDriveClient
+{
+ private readonly DriveService _driveService;
+
+ public GoogleDriveClient(DriveService driveService)
+ {
+ _driveService = driveService ?? throw new ArgumentNullException(nameof(driveService));
+ }
+
+ public Task EnsureRootAsync(string rootFolderId, bool createIfNotExists, CancellationToken cancellationToken)
+ {
+ // Google Drive root exists by default when using "root". Additional folder tree is created on demand in UploadAsync.
+ return Task.CompletedTask;
+ }
+
+ public async Task UploadAsync(string rootFolderId, string path, Stream content, string? contentType, CancellationToken cancellationToken)
+ {
+ var (parentId, fileName) = await EnsureParentFolderAsync(rootFolderId, path, cancellationToken);
+
+ var fileMetadata = new DriveFile
+ {
+ Name = fileName,
+ Parents = new List { parentId }
+ };
+
+ var request = _driveService.Files.Create(fileMetadata, content, contentType ?? "application/octet-stream");
+ request.Fields = "id,name,parents,createdTime,modifiedTime,md5Checksum,size";
+ return await request.UploadAsync(cancellationToken).ContinueWith(async _ => await _driveService.Files.Get(request.ResponseBody.Id).ExecuteAsync(cancellationToken)).Unwrap();
+ }
+
+ public async Task DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ var file = await FindFileByPathAsync(rootFolderId, path, cancellationToken) ?? throw new FileNotFoundException(path);
+ var stream = new MemoryStream();
+ await _driveService.Files.Get(file.Id).DownloadAsync(stream, cancellationToken);
+ stream.Position = 0;
+ return stream;
+ }
+
+ public async Task DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ var file = await FindFileByPathAsync(rootFolderId, path, cancellationToken);
+ if (file == null)
+ {
+ return false;
+ }
+
+ await _driveService.Files.Delete(file.Id).ExecuteAsync(cancellationToken);
+ return true;
+ }
+
+ public async Task ExistsAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ return await FindFileByPathAsync(rootFolderId, path, cancellationToken) != null;
+ }
+
+ public Task GetMetadataAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ return FindFileByPathAsync(rootFolderId, path, cancellationToken);
+ }
+
+ public async IAsyncEnumerable ListAsync(string rootFolderId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ var parentId = string.IsNullOrWhiteSpace(directory)
+ ? rootFolderId
+ : await EnsureFolderPathAsync(rootFolderId, directory!, false, cancellationToken) ?? rootFolderId;
+
+ var request = _driveService.Files.List();
+ request.Q = $"'{parentId}' in parents and trashed=false";
+ request.Fields = "files(id,name,parents,createdTime,modifiedTime,md5Checksum,size,mimeType)";
+
+ do
+ {
+ var response = await request.ExecuteAsync(cancellationToken);
+ foreach (var file in response.Files ?? Enumerable.Empty())
+ {
+ yield return file;
+ }
+
+ request.PageToken = response.NextPageToken;
+ } while (!string.IsNullOrEmpty(request.PageToken) && !cancellationToken.IsCancellationRequested);
+ }
+
+ private async Task<(string ParentId, string Name)> EnsureParentFolderAsync(string rootFolderId, string fullPath, CancellationToken cancellationToken)
+ {
+ var normalizedPath = fullPath.Replace("\\", "/").Trim('/');
+ var segments = normalizedPath.Split('/', StringSplitOptions.RemoveEmptyEntries);
+ if (segments.Length == 0)
+ {
+ return (rootFolderId, Guid.NewGuid().ToString("N"));
+ }
+
+ var parentPath = string.Join('/', segments.Take(segments.Length - 1));
+ var parentId = await EnsureFolderPathAsync(rootFolderId, parentPath, true, cancellationToken) ?? rootFolderId;
+ return (parentId, segments.Last());
+ }
+
+ private async Task EnsureFolderPathAsync(string rootFolderId, string path, bool createIfMissing, CancellationToken cancellationToken)
+ {
+ var currentId = rootFolderId;
+ foreach (var segment in path.Split('/', StringSplitOptions.RemoveEmptyEntries))
+ {
+ var folder = await FindChildAsync(currentId, segment, cancellationToken);
+ if (folder == null)
+ {
+ if (!createIfMissing)
+ {
+ return null;
+ }
+
+ var metadata = new DriveFile { Name = segment, MimeType = "application/vnd.google-apps.folder", Parents = new List { currentId } };
+ folder = await _driveService.Files.Create(metadata).ExecuteAsync(cancellationToken);
+ }
+
+ currentId = folder.Id;
+ }
+
+ return currentId;
+ }
+
+ private async Task FindChildAsync(string parentId, string name, CancellationToken cancellationToken)
+ {
+ var request = _driveService.Files.List();
+ request.Q = $"'{parentId}' in parents and name='{name}' and trashed=false";
+ request.Fields = "files(id,name,parents,createdTime,modifiedTime,md5Checksum,size,mimeType)";
+ var response = await request.ExecuteAsync(cancellationToken);
+ return response.Files?.FirstOrDefault();
+ }
+
+ private async Task FindFileByPathAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ var normalizedPath = path.Replace("\\", "/").Trim('/');
+ var segments = normalizedPath.Split('/', StringSplitOptions.RemoveEmptyEntries);
+ if (segments.Length == 0)
+ {
+ return null;
+ }
+
+ var parentPath = string.Join('/', segments.Take(segments.Length - 1));
+ var fileName = segments.Last();
+ var parentId = await EnsureFolderPathAsync(rootFolderId, parentPath, false, cancellationToken);
+ if (parentId == null)
+ {
+ return null;
+ }
+
+ return await FindChildAsync(parentId, fileName, cancellationToken);
+ }
+}
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/Clients/IGoogleDriveClient.cs b/Storages/ManagedCode.Storage.GoogleDrive/Clients/IGoogleDriveClient.cs
new file mode 100644
index 0000000..5ac2632
--- /dev/null
+++ b/Storages/ManagedCode.Storage.GoogleDrive/Clients/IGoogleDriveClient.cs
@@ -0,0 +1,24 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using DriveFile = Google.Apis.Drive.v3.Data.File;
+
+namespace ManagedCode.Storage.GoogleDrive.Clients;
+
+public interface IGoogleDriveClient
+{
+ Task EnsureRootAsync(string rootFolderId, bool createIfNotExists, CancellationToken cancellationToken);
+
+ Task UploadAsync(string rootFolderId, string path, Stream content, string? contentType, CancellationToken cancellationToken);
+
+ Task DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken);
+
+ Task DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken);
+
+ Task ExistsAsync(string rootFolderId, string path, CancellationToken cancellationToken);
+
+ Task GetMetadataAsync(string rootFolderId, string path, CancellationToken cancellationToken);
+
+ IAsyncEnumerable ListAsync(string rootFolderId, string? directory, CancellationToken cancellationToken);
+}
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs
new file mode 100644
index 0000000..2d6d941
--- /dev/null
+++ b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorage.cs
@@ -0,0 +1,253 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using ManagedCode.Communication;
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.Core.Models;
+using ManagedCode.Storage.GoogleDrive.Clients;
+using ManagedCode.Storage.GoogleDrive.Options;
+using Microsoft.Extensions.Logging;
+using File = Google.Apis.Drive.v3.Data.File;
+
+namespace ManagedCode.Storage.GoogleDrive;
+
+/// <summary>
+/// Google Drive implementation of the shared storage contract. All Drive I/O is delegated to an
+/// <see cref="IGoogleDriveClient"/>; blob paths are normalized to forward slashes relative to
+/// <c>StorageOptions.RootFolderId</c>, and failures are surfaced as failed Result values rather
+/// than thrown exceptions.
+/// NOTE(review): generic type arguments (BaseStorage, Task, Result, IAsyncEnumerable) appear
+/// stripped in this patch view — confirm against the committed file.
+/// </summary>
+public class GoogleDriveStorage : BaseStorage, IGoogleDriveStorage
+{
+    // May be null; LogException is called directly on this reference below, so it must be a
+    // null-tolerant extension method — TODO confirm against ManagedCode logging extensions.
+    private readonly ILogger? _logger;
+
+    /// <summary>Creates the storage over the given options; the logger is optional.</summary>
+    public GoogleDriveStorage(GoogleDriveStorageOptions storageOptions, ILogger? logger = null) : base(storageOptions)
+    {
+        _logger = logger;
+    }
+
+    /// <summary>
+    /// Prefers an explicitly supplied <c>Client</c>; otherwise wraps the configured DriveService.
+    /// Throws when neither is configured.
+    /// </summary>
+    protected override IGoogleDriveClient CreateStorageClient()
+    {
+        if (StorageOptions.Client != null)
+        {
+            return StorageOptions.Client;
+        }
+
+        if (StorageOptions.DriveService != null)
+        {
+            return new GoogleDriveClient(StorageOptions.DriveService);
+        }
+
+        throw new InvalidOperationException("DriveService client is not configured for Google Drive storage.");
+    }
+
+    /// <summary>Ensures the configured root folder exists (creating it when allowed by options).</summary>
+    protected override async Task CreateContainerInternalAsync(CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await StorageClient.EnsureRootAsync(StorageOptions.RootFolderId, StorageOptions.CreateContainerIfNotExists, cancellationToken);
+            IsContainerCreated = true;
+            return Result.Succeed();
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>No-op: reports success without touching the Drive root.</summary>
+    public override Task RemoveContainerAsync(CancellationToken cancellationToken = default)
+    {
+        // Root folder cleanup is not performed automatically; leave underlying Drive content intact.
+        return Task.FromResult(Result.Succeed());
+    }
+
+    /// <summary>
+    /// Deletes the files directly listed under <paramref name="directory"/>. Sub-folders are
+    /// skipped (Drive folder MIME type) — NOTE(review): whether ListAsync is recursive is not
+    /// visible here, so nested files may survive; confirm the client's listing semantics.
+    /// </summary>
+    protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var normalizedDirectory = NormalizeRelativePath(directory);
+
+            await foreach (var item in StorageClient.ListAsync(StorageOptions.RootFolderId, normalizedDirectory, cancellationToken))
+            {
+                // "application/vnd.google-apps.folder" is the Drive MIME type for folders.
+                if (item.MimeType == "application/vnd.google-apps.folder")
+                {
+                    continue;
+                }
+
+                var path = string.IsNullOrWhiteSpace(normalizedDirectory) ? item.Name : $"{normalizedDirectory}/{item.Name}";
+                await StorageClient.DeleteAsync(StorageOptions.RootFolderId, path!, cancellationToken);
+            }
+
+            return Result.Succeed();
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Uploads the stream and maps the returned Drive file to BlobMetadata.</summary>
+    protected override async Task> UploadInternalAsync(Stream stream, UploadOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var uploaded = await StorageClient.UploadAsync(StorageOptions.RootFolderId, path, stream, options.MimeType, cancellationToken);
+            return Result.Succeed(ToBlobMetadata(uploaded, path));
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Copies the remote file into <paramref name="localFile"/> and returns it.</summary>
+    protected override async Task> DownloadInternalAsync(LocalFile localFile, DownloadOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var remoteStream = await StorageClient.DownloadAsync(StorageOptions.RootFolderId, path, cancellationToken);
+
+            await using (remoteStream)
+            await using (var fileStream = localFile.FileStream)
+            {
+                await remoteStream.CopyToAsync(fileStream, cancellationToken);
+                // Rewind so subsequent readers of the local file start at the beginning.
+                fileStream.Position = 0;
+            }
+
+            return Result.Succeed(localFile);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Deletes a single file; the client reports whether anything was removed.</summary>
+    protected override async Task> DeleteInternalAsync(DeleteOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var deleted = await StorageClient.DeleteAsync(StorageOptions.RootFolderId, path, cancellationToken);
+            return Result.Succeed(deleted);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Checks file existence via the Drive client.</summary>
+    protected override async Task> ExistsInternalAsync(ExistOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var exists = await StorageClient.ExistsAsync(StorageOptions.RootFolderId, path, cancellationToken);
+            return Result.Succeed(exists);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Fetches metadata; a missing file maps to a failed Result with FileNotFoundException.</summary>
+    protected override async Task> GetBlobMetadataInternalAsync(MetadataOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var item = await StorageClient.GetMetadataAsync(StorageOptions.RootFolderId, path, cancellationToken);
+            return item == null
+                ? Result.Fail(new FileNotFoundException($"File '{path}' not found in Google Drive."))
+                : Result.Succeed(ToBlobMetadata(item, path));
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>
+    /// Streams metadata for the files under <paramref name="directory"/>, skipping folders.
+    /// Exceptions propagate to the caller here (no Result wrapper on iterators).
+    /// </summary>
+    public override async IAsyncEnumerable GetBlobMetadataListAsync(string? directory = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+    {
+        await EnsureContainerExist(cancellationToken);
+        var normalizedDirectory = string.IsNullOrWhiteSpace(directory) ? null : NormalizeRelativePath(directory!);
+
+        await foreach (var item in StorageClient.ListAsync(StorageOptions.RootFolderId, normalizedDirectory, cancellationToken))
+        {
+            if (item.MimeType == "application/vnd.google-apps.folder")
+            {
+                continue;
+            }
+
+            var fullPath = normalizedDirectory == null ? item.Name! : $"{normalizedDirectory}/{item.Name}";
+            yield return ToBlobMetadata(item, fullPath);
+        }
+    }
+
+    /// <summary>Returns the raw download stream; the caller owns disposal of the stream.</summary>
+    public override async Task> GetStreamAsync(string fileName, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(fileName);
+            var stream = await StorageClient.DownloadAsync(StorageOptions.RootFolderId, path, cancellationToken);
+            return Result.Succeed(stream);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>No-op success: Google Drive exposes no legal-hold control through this client.</summary>
+    protected override Task SetLegalHoldInternalAsync(bool hasLegalHold, LegalHoldOptions options, CancellationToken cancellationToken = default)
+    {
+        return Task.FromResult(Result.Succeed());
+    }
+
+    /// <summary>Always reports no legal hold (feature not surfaced by this provider).</summary>
+    protected override Task> HasLegalHoldInternalAsync(LegalHoldOptions options, CancellationToken cancellationToken = default)
+    {
+        return Task.FromResult(Result.Succeed(false));
+    }
+
+    // Unlike the OneDrive sibling, no root prefix is applied here — the root is the folder id itself.
+    private string BuildFullPath(string? relativePath)
+    {
+        var normalized = NormalizeRelativePath(relativePath ?? string.Empty);
+        return normalized;
+    }
+
+    // Converts backslashes to forward slashes and strips leading/trailing separators.
+    private static string NormalizeRelativePath(string path)
+    {
+        return path.Replace("\\", "/").Trim('/');
+    }
+
+    /// <summary>Maps a Drive file resource to the provider-neutral BlobMetadata shape.</summary>
+    private BlobMetadata ToBlobMetadata(File file, string fullName)
+    {
+        return new BlobMetadata
+        {
+            Name = file.Name ?? Path.GetFileName(fullName),
+            FullName = fullName,
+            Container = StorageOptions.RootFolderId,
+            Uri = file.WebViewLink != null ? new Uri(file.WebViewLink) : null,
+            // Drive may omit timestamps; fall back to "now" rather than default(DateTimeOffset).
+            CreatedOn = file.CreatedTimeDateTimeOffset ?? DateTimeOffset.UtcNow,
+            LastModified = file.ModifiedTimeDateTimeOffset ?? DateTimeOffset.UtcNow,
+            Length = (ulong)(file.Size ?? 0),
+            MimeType = file.MimeType,
+            Metadata = new Dictionary
+            {
+                {"Id", file.Id ?? string.Empty},
+                {"Md5", file.Md5Checksum ?? string.Empty}
+            }
+        };
+    }
+}
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorageProvider.cs b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorageProvider.cs
new file mode 100644
index 0000000..45734a4
--- /dev/null
+++ b/Storages/ManagedCode.Storage.GoogleDrive/GoogleDriveStorageProvider.cs
@@ -0,0 +1,38 @@
+using System;
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.Core.Extensions;
+using ManagedCode.Storage.Core.Providers;
+using ManagedCode.Storage.GoogleDrive.Options;
+using Microsoft.Extensions.Logging;
+
+namespace ManagedCode.Storage.GoogleDrive;
+
+/// <summary>
+/// IStorageProvider that materializes <see cref="GoogleDriveStorage"/> instances from
+/// <see cref="GoogleDriveStorageOptions"/>. Registered defaults are cloned per request so
+/// callers cannot mutate the shared configuration.
+/// NOTE(review): generic parameters on CreateStorage and ILogger appear stripped in this patch
+/// view — confirm the exact signatures against the committed file.
+/// </summary>
+public class GoogleDriveStorageProvider(IServiceProvider serviceProvider, GoogleDriveStorageOptions defaultOptions) : IStorageProvider
+{
+    public Type StorageOptionsType => typeof(GoogleDriveStorageOptions);
+
+    /// <summary>Creates a storage instance; rejects options of any other concrete type.</summary>
+    public TStorage CreateStorage(TOptions options)
+        where TStorage : class, IStorage
+        where TOptions : class, IStorageOptions
+    {
+        if (options is not GoogleDriveStorageOptions driveOptions)
+        {
+            throw new ArgumentException($"Options must be of type {typeof(GoogleDriveStorageOptions)}", nameof(options));
+        }
+
+        // NOTE(review): resolving the non-generic ILogger may yield null when only ILogger<T>
+        // registrations exist in the container — confirm the intended logger category.
+        var logger = serviceProvider.GetService(typeof(ILogger)) as ILogger;
+        var storage = new GoogleDriveStorage(driveOptions, logger);
+        return storage as TStorage ?? throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}");
+    }
+
+    /// <summary>Returns a fresh copy of the registered default options.</summary>
+    public IStorageOptions GetDefaultOptions()
+    {
+        return new GoogleDriveStorageOptions
+        {
+            RootFolderId = defaultOptions.RootFolderId,
+            DriveService = defaultOptions.DriveService,
+            Client = defaultOptions.Client,
+            CreateContainerIfNotExists = defaultOptions.CreateContainerIfNotExists
+        };
+    }
+}
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/IGoogleDriveStorage.cs b/Storages/ManagedCode.Storage.GoogleDrive/IGoogleDriveStorage.cs
new file mode 100644
index 0000000..55229ac
--- /dev/null
+++ b/Storages/ManagedCode.Storage.GoogleDrive/IGoogleDriveStorage.cs
@@ -0,0 +1,9 @@
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.GoogleDrive.Clients;
+using ManagedCode.Storage.GoogleDrive.Options;
+
+namespace ManagedCode.Storage.GoogleDrive;
+
+/// <summary>Marker interface identifying the Google Drive implementation of <see cref="IStorage"/> for DI resolution.</summary>
+public interface IGoogleDriveStorage : IStorage
+{
+}
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj b/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj
new file mode 100644
index 0000000..e6ada89
--- /dev/null
+++ b/Storages/ManagedCode.Storage.GoogleDrive/ManagedCode.Storage.GoogleDrive.csproj
@@ -0,0 +1,19 @@
+
+
+ true
+
+
+ ManagedCode.Storage.GoogleDrive
+ ManagedCode.Storage.GoogleDrive
+ Google Drive provider for ManagedCode.Storage.
+ managedcode, storage, google drive
+
+
+
+
+
+
+
+
+
+
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/Options/GoogleDriveStorageOptions.cs b/Storages/ManagedCode.Storage.GoogleDrive/Options/GoogleDriveStorageOptions.cs
new file mode 100644
index 0000000..91c3b15
--- /dev/null
+++ b/Storages/ManagedCode.Storage.GoogleDrive/Options/GoogleDriveStorageOptions.cs
@@ -0,0 +1,16 @@
+using Google.Apis.Drive.v3;
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.GoogleDrive.Clients;
+
+namespace ManagedCode.Storage.GoogleDrive.Options;
+
+/// <summary>
+/// Configuration for the Google Drive storage provider. Supply either a prebuilt
+/// <see cref="Client"/> (takes precedence) or an authenticated <see cref="DriveService"/>.
+/// </summary>
+public class GoogleDriveStorageOptions : IStorageOptions
+{
+    // Optional explicit client; when set, DriveService is ignored by the storage.
+    public IGoogleDriveClient? Client { get; set; }
+
+    // Authenticated Google Drive v3 service used to build the default client.
+    public DriveService? DriveService { get; set; }
+
+    // Folder id scoping all operations; "root" is Drive's alias for the My Drive root.
+    public string RootFolderId { get; set; } = "root";
+
+    // When true, the root folder is created on first use if it does not exist.
+    public bool CreateContainerIfNotExists { get; set; } = true;
+}
diff --git a/Storages/ManagedCode.Storage.GoogleDrive/PLAN.md b/Storages/ManagedCode.Storage.GoogleDrive/PLAN.md
new file mode 100644
index 0000000..f881f18
--- /dev/null
+++ b/Storages/ManagedCode.Storage.GoogleDrive/PLAN.md
@@ -0,0 +1,8 @@
+# Google Drive integration plan
+
+- [x] Reference the official `Google.Apis.Drive.v3` client and thread it through `GoogleDriveStorageOptions`.
+- [x] Build `IGoogleDriveClient` with a Drive-service backed implementation that honors folder hierarchies, metadata fields, and official upload/download patterns.
+- [x] Adapt `GoogleDriveStorage` to produce `BlobMetadata` results and operate through the shared `BaseStorage` contract.
+- [ ] Provide quick-start instructions for OAuth client configuration, service account usage, and refresh-token setup for console and ASP.NET apps.
+- [ ] Expand tests with deterministic `IGoogleDriveClient` fakes that simulate Drive folder traversal, file uploads, range downloads, deletions, and metadata fetches.
+- [ ] Add docs showing the minimal Drive scopes (`https://www.googleapis.com/auth/drive.file`) and how to inject authenticated `DriveService` instances via DI.
diff --git a/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs b/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs
new file mode 100644
index 0000000..42228f7
--- /dev/null
+++ b/Storages/ManagedCode.Storage.OneDrive/Clients/GraphOneDriveClient.cs
@@ -0,0 +1,220 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Graph;
+using Microsoft.Graph.Models;
+using Microsoft.Graph.Models.ODataErrors;
+
+namespace ManagedCode.Storage.OneDrive.Clients;
+
+/// <summary>
+/// IOneDriveClient backed by the Microsoft Graph SDK. Items are addressed relative to the
+/// drive root via ItemWithPath; the pseudo drive id "me" is resolved to the signed-in user's
+/// drive on every call.
+/// NOTE(review): generic type arguments and several return types appear stripped in this patch
+/// view — confirm signatures against the committed file.
+/// </summary>
+public class GraphOneDriveClient : IOneDriveClient
+{
+    private readonly GraphServiceClient _graphServiceClient;
+
+    public GraphOneDriveClient(GraphServiceClient graphServiceClient)
+    {
+        _graphServiceClient = graphServiceClient ?? throw new ArgumentNullException(nameof(graphServiceClient));
+    }
+
+    // The public surface is thin pass-throughs to the private *Internal implementations below.
+
+    public Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken)
+    {
+        return EnsureRootInternalAsync(driveId, rootPath, createIfNotExists, cancellationToken);
+    }
+
+    public Task UploadAsync(string driveId, string path, Stream content, string? contentType, CancellationToken cancellationToken)
+    {
+        return UploadInternalAsync(driveId, path, content, contentType, cancellationToken);
+    }
+
+    public Task DownloadAsync(string driveId, string path, CancellationToken cancellationToken)
+    {
+        return DownloadInternalAsync(driveId, path, cancellationToken);
+    }
+
+    public Task DeleteAsync(string driveId, string path, CancellationToken cancellationToken)
+    {
+        return DeleteInternalAsync(driveId, path, cancellationToken);
+    }
+
+    public Task ExistsAsync(string driveId, string path, CancellationToken cancellationToken)
+    {
+        return ExistsInternalAsync(driveId, path, cancellationToken);
+    }
+
+    public Task GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken)
+    {
+        return GetMetadataInternalAsync(driveId, path, cancellationToken);
+    }
+
+    public IAsyncEnumerable ListAsync(string driveId, string? directory, CancellationToken cancellationToken)
+    {
+        return ListInternalAsync(driveId, directory, cancellationToken);
+    }
+
+    /// <summary>
+    /// Walks the root path segment by segment from the drive root, reusing existing folders and
+    /// creating missing ones when allowed; otherwise throws DirectoryNotFoundException.
+    /// </summary>
+    private async Task EnsureRootInternalAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken)
+    {
+        var normalizedRoot = NormalizePath(rootPath);
+        // An empty root means "use the drive root directly" — nothing to verify or create.
+        if (string.IsNullOrWhiteSpace(normalizedRoot) || normalizedRoot == "/")
+        {
+            return;
+        }
+
+        var root = await GetRootDriveItemAsync(driveId, cancellationToken).ConfigureAwait(false);
+        var parentId = root.Id ?? throw new InvalidOperationException("Drive root is unavailable for the configured account.");
+        var segments = normalizedRoot.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
+        foreach (var segment in segments)
+        {
+            cancellationToken.ThrowIfCancellationRequested();
+
+            var existing = await FindChildAsync(driveId, parentId, segment, cancellationToken).ConfigureAwait(false);
+            if (existing != null)
+            {
+                parentId = existing.Id!;
+                continue;
+            }
+
+            if (!createIfNotExists)
+            {
+                throw new DirectoryNotFoundException($"Folder '{normalizedRoot}' is missing in the configured drive.");
+            }
+
+            var childrenBuilder = await GetChildrenBuilderAsync(driveId, parentId, cancellationToken).ConfigureAwait(false);
+            var created = await childrenBuilder.PostAsync(new DriveItem
+            {
+                Name = segment,
+                Folder = new Folder()
+            }, cancellationToken: cancellationToken).ConfigureAwait(false);
+
+            parentId = created?.Id ?? throw new InvalidOperationException($"Failed to create OneDrive folder '{segment}'.");
+        }
+    }
+
+    /// <summary>PUTs the stream to the item's content endpoint (simple upload, single request).</summary>
+    private async Task UploadInternalAsync(string driveId, string path, Stream content, string? contentType, CancellationToken cancellationToken)
+    {
+        // NOTE(review): the contentType parameter is not used by this implementation.
+        var rootBuilder = await GetRootItemBuilderAsync(driveId, cancellationToken).ConfigureAwait(false);
+        var request = rootBuilder.ItemWithPath(NormalizePath(path)).Content;
+        var response = await request.PutAsync(content, cancellationToken: cancellationToken).ConfigureAwait(false);
+
+        return response ?? throw new InvalidOperationException("Graph upload returned no item.");
+    }
+
+    /// <summary>Fetches the item's content stream; a null response maps to FileNotFoundException.</summary>
+    private async Task DownloadInternalAsync(string driveId, string path, CancellationToken cancellationToken)
+    {
+        var rootBuilder = await GetRootItemBuilderAsync(driveId, cancellationToken).ConfigureAwait(false);
+        var request = rootBuilder.ItemWithPath(NormalizePath(path)).Content;
+        var stream = await request.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
+        return stream ?? throw new FileNotFoundException($"File '{path}' not found in OneDrive.");
+    }
+
+    /// <summary>Deletes the item; a Graph 404 is treated as "nothing to delete" and returns false.</summary>
+    private async Task DeleteInternalAsync(string driveId, string path, CancellationToken cancellationToken)
+    {
+        try
+        {
+            var rootBuilder = await GetRootItemBuilderAsync(driveId, cancellationToken).ConfigureAwait(false);
+            await rootBuilder.ItemWithPath(NormalizePath(path)).DeleteAsync(cancellationToken: cancellationToken)
+                .ConfigureAwait(false);
+            return true;
+        }
+        catch (ODataError ex) when (ex.ResponseStatusCode == 404)
+        {
+            return false;
+        }
+    }
+
+    /// <summary>Existence check implemented as a metadata fetch that tolerates 404.</summary>
+    private async Task ExistsInternalAsync(string driveId, string path, CancellationToken cancellationToken)
+    {
+        var item = await GetMetadataInternalAsync(driveId, path, cancellationToken).ConfigureAwait(false);
+        return item != null;
+    }
+
+    /// <summary>Gets the item's metadata, returning null when Graph reports 404.</summary>
+    private async Task GetMetadataInternalAsync(string driveId, string path, CancellationToken cancellationToken)
+    {
+        try
+        {
+            var rootBuilder = await GetRootItemBuilderAsync(driveId, cancellationToken).ConfigureAwait(false);
+            return await rootBuilder.ItemWithPath(NormalizePath(path)).GetAsync(cancellationToken: cancellationToken)
+                .ConfigureAwait(false);
+        }
+        catch (ODataError ex) when (ex.ResponseStatusCode == 404)
+        {
+            return null;
+        }
+    }
+
+    /// <summary>
+    /// Enumerates direct children of the directory (or the drive root when null). Missing parent
+    /// yields an empty sequence.
+    /// NOTE(review): only the first children page is read — no @odata.nextLink paging — so large
+    /// folders are truncated; confirm whether a PageIterator is needed.
+    /// </summary>
+    private async IAsyncEnumerable ListInternalAsync(string driveId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken)
+    {
+        var normalized = string.IsNullOrWhiteSpace(directory) ? null : NormalizePath(directory!);
+        var resolvedDriveId = await ResolveDriveIdAsync(driveId, cancellationToken).ConfigureAwait(false);
+        var parent = normalized == null
+            ? await _graphServiceClient.Drives[resolvedDriveId].Root.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false)
+            : await _graphServiceClient.Drives[resolvedDriveId].Root.ItemWithPath(normalized).GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
+
+        if (parent?.Id == null)
+        {
+            yield break;
+        }
+
+        var builder = _graphServiceClient.Drives[resolvedDriveId].Items[parent.Id].Children;
+        var page = await builder.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
+        if (page?.Value == null)
+        {
+            yield break;
+        }
+
+        foreach (var item in page.Value)
+        {
+            cancellationToken.ThrowIfCancellationRequested();
+            if (item != null)
+            {
+                yield return item;
+            }
+        }
+    }
+
+    // Request-builder helpers; each resolves the drive id first so "me" works everywhere.
+
+    private async Task GetRootItemBuilderAsync(string driveId, CancellationToken cancellationToken)
+    {
+        var resolvedDriveId = await ResolveDriveIdAsync(driveId, cancellationToken).ConfigureAwait(false);
+        return _graphServiceClient.Drives[resolvedDriveId].Root;
+    }
+
+    private async Task GetChildrenBuilderAsync(string driveId, string parentId, CancellationToken cancellationToken)
+    {
+        var resolvedDriveId = await ResolveDriveIdAsync(driveId, cancellationToken).ConfigureAwait(false);
+        return _graphServiceClient.Drives[resolvedDriveId].Items[parentId].Children;
+    }
+
+    private async Task GetRootDriveItemAsync(string driveId, CancellationToken cancellationToken)
+    {
+        var resolvedDriveId = await ResolveDriveIdAsync(driveId, cancellationToken).ConfigureAwait(false);
+        var root = await _graphServiceClient.Drives[resolvedDriveId].Root.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
+        return root ?? throw new InvalidOperationException("Drive root is unavailable for the configured account.");
+    }
+
+    /// <summary>
+    /// Maps the "me" alias to the signed-in user's drive id via /me/drive; any other value is
+    /// passed through unchanged. NOTE(review): the result is not cached, so "me" costs an extra
+    /// Graph round-trip per operation — consider memoizing.
+    /// </summary>
+    private async Task ResolveDriveIdAsync(string driveId, CancellationToken cancellationToken)
+    {
+        if (!driveId.Equals("me", StringComparison.OrdinalIgnoreCase))
+        {
+            return driveId;
+        }
+
+        var drive = await _graphServiceClient.Me.Drive.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
+        return drive?.Id ?? throw new InvalidOperationException("Unable to resolve the current user's drive id.");
+    }
+
+    /// <summary>
+    /// Finds a direct child by case-insensitive name. NOTE(review): only the first children page
+    /// is searched (no paging), so a match beyond the first page is missed.
+    /// </summary>
+    private async Task FindChildAsync(string driveId, string parentId, string name, CancellationToken cancellationToken)
+    {
+        var childrenBuilder = await GetChildrenBuilderAsync(driveId, parentId, cancellationToken).ConfigureAwait(false);
+        var children = await childrenBuilder.GetAsync(cancellationToken: cancellationToken).ConfigureAwait(false);
+
+        return children?.Value?.FirstOrDefault(c => string.Equals(c?.Name, name, StringComparison.OrdinalIgnoreCase));
+    }
+
+    // Converts backslashes to forward slashes and strips leading/trailing separators.
+    private static string NormalizePath(string path)
+    {
+        return path.Replace("\\", "/").Trim('/');
+    }
+}
diff --git a/Storages/ManagedCode.Storage.OneDrive/Clients/IOneDriveClient.cs b/Storages/ManagedCode.Storage.OneDrive/Clients/IOneDriveClient.cs
new file mode 100644
index 0000000..936a4a9
--- /dev/null
+++ b/Storages/ManagedCode.Storage.OneDrive/Clients/IOneDriveClient.cs
@@ -0,0 +1,24 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Threading;
+using System.Threading.Tasks;
+using Microsoft.Graph.Models;
+
+namespace ManagedCode.Storage.OneDrive.Clients;
+
+/// <summary>
+/// Client abstraction over OneDrive (Microsoft Graph) operations used by the OneDrive storage
+/// provider. Every member receives the target drive id ("me" selects the current user's drive
+/// in the Graph-backed implementation) plus a path relative to the drive root.
+/// NOTE(review): generic type arguments (e.g. on Task / IAsyncEnumerable) appear stripped in
+/// this patch view — confirm the declared return types against the committed file.
+/// </summary>
+public interface IOneDriveClient
+{
+    /// <summary>Verifies the root folder path exists, optionally creating missing segments.</summary>
+    Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken);
+
+    /// <summary>Uploads <paramref name="content"/> to <paramref name="path"/> in the drive.</summary>
+    Task UploadAsync(string driveId, string path, Stream content, string? contentType, CancellationToken cancellationToken);
+
+    /// <summary>Opens a readable stream for the item at <paramref name="path"/>.</summary>
+    Task DownloadAsync(string driveId, string path, CancellationToken cancellationToken);
+
+    /// <summary>Deletes the item at <paramref name="path"/>.</summary>
+    Task DeleteAsync(string driveId, string path, CancellationToken cancellationToken);
+
+    /// <summary>Checks whether an item exists at <paramref name="path"/>.</summary>
+    Task ExistsAsync(string driveId, string path, CancellationToken cancellationToken);
+
+    /// <summary>Fetches drive-item metadata for <paramref name="path"/>.</summary>
+    Task GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken);
+
+    /// <summary>Enumerates children of <paramref name="directory"/> (the drive root when null).</summary>
+    IAsyncEnumerable ListAsync(string driveId, string? directory, CancellationToken cancellationToken);
+}
diff --git a/Storages/ManagedCode.Storage.OneDrive/IOneDriveStorage.cs b/Storages/ManagedCode.Storage.OneDrive/IOneDriveStorage.cs
new file mode 100644
index 0000000..8156f47
--- /dev/null
+++ b/Storages/ManagedCode.Storage.OneDrive/IOneDriveStorage.cs
@@ -0,0 +1,9 @@
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.OneDrive.Clients;
+using ManagedCode.Storage.OneDrive.Options;
+
+namespace ManagedCode.Storage.OneDrive;
+
+/// <summary>Marker interface identifying the OneDrive implementation of <see cref="IStorage"/> for DI resolution.</summary>
+public interface IOneDriveStorage : IStorage
+{
+}
diff --git a/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj b/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj
new file mode 100644
index 0000000..eae4f6d
--- /dev/null
+++ b/Storages/ManagedCode.Storage.OneDrive/ManagedCode.Storage.OneDrive.csproj
@@ -0,0 +1,19 @@
+
+
+ true
+
+
+ ManagedCode.Storage.OneDrive
+ ManagedCode.Storage.OneDrive
+ Storage provider for Microsoft OneDrive built on Microsoft Graph.
+ managedcode, storage, onedrive, microsoft graph
+
+
+
+
+
+
+
+
+
+
diff --git a/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs
new file mode 100644
index 0000000..4f3d59d
--- /dev/null
+++ b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorage.cs
@@ -0,0 +1,254 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using ManagedCode.Communication;
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.Core.Models;
+using ManagedCode.Storage.OneDrive.Clients;
+using ManagedCode.Storage.OneDrive.Options;
+using Microsoft.Extensions.Logging;
+using Microsoft.Graph.Models;
+
+namespace ManagedCode.Storage.OneDrive;
+
+/// <summary>
+/// OneDrive implementation of the shared storage contract, delegating all drive I/O to an
+/// <see cref="IOneDriveClient"/>. Blob paths are prefixed with the configured RootPath and
+/// normalized to forward slashes; failures are surfaced as failed Result values.
+/// NOTE(review): generic type arguments (BaseStorage, Task, Result, IAsyncEnumerable) appear
+/// stripped in this patch view — confirm against the committed file.
+/// </summary>
+public class OneDriveStorage : BaseStorage, IOneDriveStorage
+{
+    // May be null; LogException is called directly on this reference below, so it must be a
+    // null-tolerant extension method — TODO confirm against ManagedCode logging extensions.
+    private readonly ILogger? _logger;
+
+    /// <summary>Creates the storage over the given options; the logger is optional.</summary>
+    public OneDriveStorage(OneDriveStorageOptions storageOptions, ILogger? logger = null) : base(storageOptions)
+    {
+        _logger = logger;
+    }
+
+    /// <summary>
+    /// Prefers an explicitly supplied <c>Client</c>; otherwise wraps the configured Graph client.
+    /// Throws when neither is configured.
+    /// </summary>
+    protected override IOneDriveClient CreateStorageClient()
+    {
+        if (StorageOptions.Client != null)
+        {
+            return StorageOptions.Client;
+        }
+
+        if (StorageOptions.GraphClient != null)
+        {
+            return new GraphOneDriveClient(StorageOptions.GraphClient);
+        }
+
+        throw new InvalidOperationException("Graph client is not configured for OneDrive storage.");
+    }
+
+    /// <summary>Ensures the configured root path exists (creating folders when allowed by options).</summary>
+    protected override async Task CreateContainerInternalAsync(CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await StorageClient.EnsureRootAsync(StorageOptions.DriveId, StorageOptions.RootPath, StorageOptions.CreateContainerIfNotExists, cancellationToken);
+            IsContainerCreated = true;
+            return Result.Succeed();
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Always fails: drive/root deletion is deliberately unsupported for OneDrive.</summary>
+    public override Task RemoveContainerAsync(CancellationToken cancellationToken = default)
+    {
+        // OneDrive containers map to drives or root folders that are typically managed by the account owner.
+        return Task.FromResult(Result.Fail(new NotSupportedException("Deleting a OneDrive container is not supported.")));
+    }
+
+    /// <summary>
+    /// Deletes the files directly listed under <paramref name="directory"/>; sub-folders are
+    /// skipped. NOTE(review): whether ListAsync is recursive is not visible here — nested files
+    /// may survive; confirm the client's listing semantics.
+    /// </summary>
+    protected override async Task DeleteDirectoryInternalAsync(string directory, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var normalizedDirectory = NormalizeRelativePath(directory);
+
+            await foreach (var item in StorageClient.ListAsync(StorageOptions.DriveId, normalizedDirectory, cancellationToken))
+            {
+                // A non-null Folder facet marks the item as a folder.
+                if (item?.Folder != null)
+                {
+                    continue;
+                }
+
+                // Trim handles the empty-directory case, where the interpolation yields "/name".
+                var path = $"{normalizedDirectory}/{item!.Name}".Trim('/');
+                await StorageClient.DeleteAsync(StorageOptions.DriveId, path, cancellationToken);
+            }
+
+            return Result.Succeed();
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Uploads the stream and maps the returned drive item to BlobMetadata.</summary>
+    protected override async Task> UploadInternalAsync(Stream stream, UploadOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var uploaded = await StorageClient.UploadAsync(StorageOptions.DriveId, path, stream, options.MimeType, cancellationToken);
+            return Result.Succeed(ToBlobMetadata(uploaded, path));
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Copies the remote item into <paramref name="localFile"/> and returns it.</summary>
+    protected override async Task> DownloadInternalAsync(LocalFile localFile, DownloadOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var remoteStream = await StorageClient.DownloadAsync(StorageOptions.DriveId, path, cancellationToken);
+            cancellationToken.ThrowIfCancellationRequested();
+
+            await using (remoteStream)
+            await using (var fileStream = localFile.FileStream)
+            {
+                await remoteStream.CopyToAsync(fileStream, cancellationToken);
+                // Rewind so subsequent readers of the local file start at the beginning.
+                fileStream.Position = 0;
+            }
+
+            return Result.Succeed(localFile);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Deletes a single item; the client reports whether anything was removed.</summary>
+    protected override async Task> DeleteInternalAsync(DeleteOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var deleted = await StorageClient.DeleteAsync(StorageOptions.DriveId, path, cancellationToken);
+            return Result.Succeed(deleted);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Checks item existence via the OneDrive client.</summary>
+    protected override async Task> ExistsInternalAsync(ExistOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var exists = await StorageClient.ExistsAsync(StorageOptions.DriveId, path, cancellationToken);
+            return Result.Succeed(exists);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>Fetches metadata; a missing item maps to a failed Result with FileNotFoundException.</summary>
+    protected override async Task> GetBlobMetadataInternalAsync(MetadataOptions options, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(options.FullPath);
+            var item = await StorageClient.GetMetadataAsync(StorageOptions.DriveId, path, cancellationToken);
+            return item == null
+                ? Result.Fail(new FileNotFoundException($"File '{path}' not found in OneDrive."))
+                : Result.Succeed(ToBlobMetadata(item, path));
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>
+    /// Streams metadata for the files under <paramref name="directory"/>, skipping folders.
+    /// Exceptions propagate to the caller here (no Result wrapper on iterators).
+    /// </summary>
+    public override async IAsyncEnumerable GetBlobMetadataListAsync(string? directory = null, [EnumeratorCancellation] CancellationToken cancellationToken = default)
+    {
+        await EnsureContainerExist(cancellationToken);
+
+        var normalizedDirectory = string.IsNullOrWhiteSpace(directory) ? null : NormalizeRelativePath(directory!);
+        await foreach (var item in StorageClient.ListAsync(StorageOptions.DriveId, normalizedDirectory, cancellationToken))
+        {
+            cancellationToken.ThrowIfCancellationRequested();
+            if (item == null || item.Folder != null)
+            {
+                continue;
+            }
+
+            var fullPath = normalizedDirectory == null ? item.Name! : $"{normalizedDirectory}/{item.Name}";
+            yield return ToBlobMetadata(item, fullPath);
+        }
+    }
+
+    /// <summary>Returns the raw download stream; the caller owns disposal of the stream.</summary>
+    public override async Task> GetStreamAsync(string fileName, CancellationToken cancellationToken = default)
+    {
+        try
+        {
+            await EnsureContainerExist(cancellationToken);
+            var path = BuildFullPath(fileName);
+            var stream = await StorageClient.DownloadAsync(StorageOptions.DriveId, path, cancellationToken);
+            return Result.Succeed(stream);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogException(ex);
+            return Result.Fail(ex);
+        }
+    }
+
+    /// <summary>No-op success: legal hold is not surfaced by this provider.</summary>
+    protected override Task SetLegalHoldInternalAsync(bool hasLegalHold, LegalHoldOptions options, CancellationToken cancellationToken = default)
+    {
+        // OneDrive does not expose legal hold controls through the Graph SDK used here.
+        return Task.FromResult(Result.Succeed());
+    }
+
+    /// <summary>Always reports no legal hold (feature not surfaced by this provider).</summary>
+    protected override Task> HasLegalHoldInternalAsync(LegalHoldOptions options, CancellationToken cancellationToken = default)
+    {
+        // OneDrive does not expose legal hold controls through the Graph SDK used here.
+        return Task.FromResult(Result.Succeed(false));
+    }
+
+    // Prefixes the caller-relative path with the configured RootPath, tolerating either part
+    // being empty so no leading/trailing slashes leak into the final path.
+    private string BuildFullPath(string? relativePath)
+    {
+        var normalized = NormalizeRelativePath(relativePath ?? string.Empty);
+        var root = NormalizeRelativePath(StorageOptions.RootPath);
+        return string.IsNullOrWhiteSpace(root) ? normalized : string.IsNullOrWhiteSpace(normalized) ? root : $"{root}/{normalized}";
+    }
+
+    // Converts backslashes to forward slashes and strips leading/trailing separators.
+    private static string NormalizeRelativePath(string path)
+    {
+        return path.Replace("\\", "/").Trim('/');
+    }
+
+    /// <summary>Maps a Graph DriveItem to the provider-neutral BlobMetadata shape.</summary>
+    private BlobMetadata ToBlobMetadata(DriveItem item, string fullName)
+    {
+        return new BlobMetadata
+        {
+            Name = item.Name ?? Path.GetFileName(fullName),
+            FullName = fullName,
+            Container = StorageOptions.DriveId,
+            Uri = item.WebUrl != null ? new Uri(item.WebUrl) : null,
+            // Graph may omit timestamps; fall back to "now" rather than default(DateTimeOffset).
+            CreatedOn = item.CreatedDateTime ?? DateTimeOffset.UtcNow,
+            LastModified = item.LastModifiedDateTime ?? DateTimeOffset.UtcNow,
+            Length = (ulong)(item.Size ?? 0),
+            MimeType = item.File?.MimeType,
+            Metadata = item.AdditionalData?.ToDictionary(k => k.Key, v => v.Value?.ToString() ?? string.Empty) ?? new Dictionary()
+        };
+    }
+}
diff --git a/Storages/ManagedCode.Storage.OneDrive/OneDriveStorageProvider.cs b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorageProvider.cs
new file mode 100644
index 0000000..3ec3dcc
--- /dev/null
+++ b/Storages/ManagedCode.Storage.OneDrive/OneDriveStorageProvider.cs
@@ -0,0 +1,40 @@
+using System;
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.Core.Extensions;
+using ManagedCode.Storage.Core.Providers;
+using ManagedCode.Storage.OneDrive.Options;
+using Microsoft.Extensions.Logging;
+
+namespace ManagedCode.Storage.OneDrive;
+
+public class OneDriveStorageProvider(IServiceProvider serviceProvider, OneDriveStorageOptions defaultOptions) : IStorageProvider
+{
+ public Type StorageOptionsType => typeof(OneDriveStorageOptions);
+
+    public TStorage CreateStorage<TStorage, TOptions>(TOptions options)
+ where TStorage : class, IStorage
+ where TOptions : class, IStorageOptions
+ {
+ if (options is not OneDriveStorageOptions driveOptions)
+ {
+ throw new ArgumentException($"Options must be of type {typeof(OneDriveStorageOptions)}", nameof(options));
+ }
+
+        var logger = serviceProvider.GetService(typeof(ILogger<OneDriveStorage>)) as ILogger<OneDriveStorage>;
+ var storage = new OneDriveStorage(driveOptions, logger);
+
+ return storage as TStorage ?? throw new InvalidOperationException($"Cannot create storage of type {typeof(TStorage)}");
+ }
+
+ public IStorageOptions GetDefaultOptions()
+ {
+ return new OneDriveStorageOptions
+ {
+ DriveId = defaultOptions.DriveId,
+ RootPath = defaultOptions.RootPath,
+ GraphClient = defaultOptions.GraphClient,
+ Client = defaultOptions.Client,
+ CreateContainerIfNotExists = defaultOptions.CreateContainerIfNotExists
+ };
+ }
+}
diff --git a/Storages/ManagedCode.Storage.OneDrive/Options/OneDriveStorageOptions.cs b/Storages/ManagedCode.Storage.OneDrive/Options/OneDriveStorageOptions.cs
new file mode 100644
index 0000000..dfc9b95
--- /dev/null
+++ b/Storages/ManagedCode.Storage.OneDrive/Options/OneDriveStorageOptions.cs
@@ -0,0 +1,18 @@
+using ManagedCode.Storage.Core;
+using ManagedCode.Storage.OneDrive.Clients;
+using Microsoft.Graph;
+
+namespace ManagedCode.Storage.OneDrive.Options;
+
+public class OneDriveStorageOptions : IStorageOptions
+{
+ public Clients.IOneDriveClient? Client { get; set; }
+
+ public GraphServiceClient? GraphClient { get; set; }
+
+ public string DriveId { get; set; } = "me";
+
+ public string RootPath { get; set; } = "/";
+
+ public bool CreateContainerIfNotExists { get; set; } = true;
+}
diff --git a/Storages/ManagedCode.Storage.OneDrive/PLAN.md b/Storages/ManagedCode.Storage.OneDrive/PLAN.md
new file mode 100644
index 0000000..058e4f1
--- /dev/null
+++ b/Storages/ManagedCode.Storage.OneDrive/PLAN.md
@@ -0,0 +1,9 @@
+# OneDrive integration plan
+
+- [x] Reference the official `Microsoft.Graph` SDK and configure `GraphServiceClient` injection through `OneDriveStorageOptions`.
+- [x] Implement `IOneDriveClient` plus `GraphOneDriveClient` to mirror upload, download, metadata, and listing APIs documented for Microsoft Graph drives.
+- [x] Create `OneDriveStorage` that adapts `BaseStorage` to OneDrive paths, normalizes root prefixes, and returns `BlobMetadata` compatible with the shared abstractions.
+- [x] Provide DI-friendly `OneDriveStorageProvider` so ASP.NET and worker hosts can register the provider alongside keyed/default storage bindings.
+- [ ] Add sample ASP.NET controller snippets showing how to request delegated or app-only permissions and pass a configured `GraphServiceClient` into `OneDriveStorageOptions`.
+- [ ] Extend tests with `IOneDriveClient` mocks that mirror Graph responses for uploads, downloads, listings, deletion, and metadata resolution.
+- [ ] Document user-facing setup: Azure App Registration, scopes (`Files.ReadWrite.All`), and the minimal token acquisition steps for CLI and ASP.NET hosts.
diff --git a/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj b/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj
index bdae9f0..e6bad4e 100644
--- a/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj
+++ b/Tests/ManagedCode.Storage.Tests/ManagedCode.Storage.Tests.csproj
@@ -52,6 +52,9 @@
+    <ProjectReference Include="..\..\Storages\ManagedCode.Storage.Dropbox\ManagedCode.Storage.Dropbox.csproj" />
+    <ProjectReference Include="..\..\Storages\ManagedCode.Storage.GoogleDrive\ManagedCode.Storage.GoogleDrive.csproj" />
+    <ProjectReference Include="..\..\Storages\ManagedCode.Storage.OneDrive\ManagedCode.Storage.OneDrive.csproj" />
diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs
new file mode 100644
index 0000000..520f864
--- /dev/null
+++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/CloudDriveStorageTests.cs
@@ -0,0 +1,391 @@
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Runtime.CompilerServices;
+using System.Threading;
+using System.Threading.Tasks;
+using Google.Apis.Drive.v3.Data;
+using ManagedCode.Storage.Core.Models;
+using ManagedCode.Storage.Dropbox;
+using ManagedCode.Storage.Dropbox.Clients;
+using ManagedCode.Storage.Dropbox.Options;
+using ManagedCode.Storage.GoogleDrive;
+using ManagedCode.Storage.GoogleDrive.Clients;
+using ManagedCode.Storage.GoogleDrive.Options;
+using ManagedCode.Storage.OneDrive;
+using ManagedCode.Storage.OneDrive.Clients;
+using ManagedCode.Storage.OneDrive.Options;
+using Microsoft.Graph.Models;
+using Shouldly;
+using Xunit;
+using File = Google.Apis.Drive.v3.Data.File;
+
+namespace ManagedCode.Storage.Tests.Storages.CloudDrive;
+
+public class CloudDriveStorageTests
+{
+ [Fact]
+ public async Task OneDrive_FakeClient_RoundTrip()
+ {
+ var fakeClient = new FakeOneDriveClient();
+ var storage = new OneDriveStorage(new OneDriveStorageOptions
+ {
+ Client = fakeClient,
+ DriveId = "drive",
+ RootPath = "root"
+ });
+
+ var uploadResult = await storage.UploadAsync("hello world", options => options.FileName = "text.txt");
+ uploadResult.IsSuccess.ShouldBeTrue();
+
+ var exists = await storage.ExistsAsync("text.txt");
+ exists.Value.ShouldBeTrue();
+
+ var metadata = await storage.GetBlobMetadataAsync("text.txt");
+ metadata.Value.Name.ShouldBe("text.txt");
+
+ var download = await storage.DownloadAsync("text.txt");
+ using var reader = new StreamReader(download.Value.FileStream);
+ (await reader.ReadToEndAsync()).ShouldBe("hello world");
+
+        var listed = new List<BlobMetadata>();
+ await foreach (var item in storage.GetBlobMetadataListAsync())
+ {
+ listed.Add(item);
+ }
+
+ listed.ShouldContain(m => m.FullName.EndsWith("text.txt"));
+ }
+
+ [Fact]
+ public async Task OneDrive_RemoveContainer_NotSupported()
+ {
+ var fakeClient = new FakeOneDriveClient();
+ var storage = new OneDriveStorage(new OneDriveStorageOptions
+ {
+ Client = fakeClient,
+ DriveId = "drive",
+ RootPath = "root"
+ });
+
+ var result = await storage.RemoveContainerAsync();
+ result.IsSuccess.ShouldBeFalse();
+ }
+
+ [Fact]
+ public async Task GoogleDrive_FakeClient_RoundTrip()
+ {
+ var fakeClient = new FakeGoogleDriveClient();
+ var storage = new GoogleDriveStorage(new GoogleDriveStorageOptions
+ {
+ Client = fakeClient,
+ RootFolderId = "root"
+ });
+
+ var uploadResult = await storage.UploadAsync("drive content", options => options.FileName = "data.bin");
+ uploadResult.IsSuccess.ShouldBeTrue();
+
+ var exists = await storage.ExistsAsync("data.bin");
+ exists.Value.ShouldBeTrue();
+
+ var metadata = await storage.GetBlobMetadataAsync("data.bin");
+ metadata.Value.FullName.ShouldBe("data.bin");
+
+ var download = await storage.DownloadAsync("data.bin");
+ using var reader = new StreamReader(download.Value.FileStream);
+ (await reader.ReadToEndAsync()).ShouldBe("drive content");
+
+        var listed = new List<BlobMetadata>();
+ await foreach (var item in storage.GetBlobMetadataListAsync())
+ {
+ listed.Add(item);
+ }
+
+ listed.ShouldContain(m => m.FullName.Contains("data.bin"));
+ }
+
+ [Fact]
+ public async Task Dropbox_FakeClient_RoundTrip()
+ {
+ var fakeClient = new FakeDropboxClient();
+ var storage = new DropboxStorage(new DropboxStorageOptions
+ {
+ Client = fakeClient,
+ RootPath = "/apps/demo"
+ });
+
+ var uploadResult = await storage.UploadAsync("dropbox payload", options => options.FileName = "file.json");
+ uploadResult.IsSuccess.ShouldBeTrue();
+
+ var exists = await storage.ExistsAsync("file.json");
+ exists.Value.ShouldBeTrue();
+
+ var metadata = await storage.GetBlobMetadataAsync("file.json");
+ metadata.Value.Name.ShouldBe("file.json");
+
+ var download = await storage.DownloadAsync("file.json");
+ using var reader = new StreamReader(download.Value.FileStream);
+ (await reader.ReadToEndAsync()).ShouldBe("dropbox payload");
+
+        var listed = new List<BlobMetadata>();
+ await foreach (var item in storage.GetBlobMetadataListAsync())
+ {
+ listed.Add(item);
+ }
+
+ listed.ShouldContain(m => m.FullName.Contains("file.json"));
+ }
+
+ private class FakeOneDriveClient : IOneDriveClient
+ {
+ private readonly InMemoryDrive _drive = new();
+
+ public Task EnsureRootAsync(string driveId, string rootPath, bool createIfNotExists, CancellationToken cancellationToken)
+ {
+ _drive.Root = rootPath;
+ return Task.CompletedTask;
+ }
+
+        public Task<DriveItem> UploadAsync(string driveId, string path, Stream content, string? contentType, CancellationToken cancellationToken)
+ {
+ var entry = _drive.Save(path, content, contentType);
+ return Task.FromResult(entry.ToDriveItem(path));
+ }
+
+        public Task<Stream> DownloadAsync(string driveId, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Download(path));
+ }
+
+        public Task<bool> DeleteAsync(string driveId, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Delete(path));
+ }
+
+        public Task<bool> ExistsAsync(string driveId, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Exists(path));
+ }
+
+        public Task<DriveItem?> GetMetadataAsync(string driveId, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Get(path)?.ToDriveItem(path));
+ }
+
+        public async IAsyncEnumerable<DriveItem> ListAsync(string driveId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ await foreach (var entry in _drive.List(directory, cancellationToken))
+ {
+ yield return entry.ToDriveItem(entry.Path);
+ }
+ }
+ }
+
+ private class FakeGoogleDriveClient : IGoogleDriveClient
+ {
+ private readonly InMemoryDrive _drive = new();
+
+ public Task EnsureRootAsync(string rootFolderId, bool createIfNotExists, CancellationToken cancellationToken)
+ {
+ _drive.Root = rootFolderId;
+ return Task.CompletedTask;
+ }
+
+        public Task<File> UploadAsync(string rootFolderId, string path, Stream content, string? contentType, CancellationToken cancellationToken)
+ {
+ var entry = _drive.Save(path, content, contentType);
+ return Task.FromResult(entry.ToGoogleFile(path));
+ }
+
+        public Task<Stream> DownloadAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Download(path));
+ }
+
+        public Task<bool> DeleteAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Delete(path));
+ }
+
+        public Task<bool> ExistsAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Exists(path));
+ }
+
+        public Task<File?> GetMetadataAsync(string rootFolderId, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Get(path)?.ToGoogleFile(path));
+ }
+
+        public async IAsyncEnumerable<File> ListAsync(string rootFolderId, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ await foreach (var entry in _drive.List(directory, cancellationToken))
+ {
+ yield return entry.ToGoogleFile(entry.Path);
+ }
+ }
+ }
+
+ private class FakeDropboxClient : IDropboxClientWrapper
+ {
+ private readonly InMemoryDrive _drive = new();
+
+ public Task EnsureRootAsync(string rootPath, bool createIfNotExists, CancellationToken cancellationToken)
+ {
+ _drive.Root = rootPath;
+ return Task.CompletedTask;
+ }
+
+        public Task<DropboxItemMetadata> UploadAsync(string rootPath, string path, Stream content, string? contentType, CancellationToken cancellationToken)
+ {
+ var entry = _drive.Save(path, content, contentType);
+ return Task.FromResult(entry.ToDropboxFile(path));
+ }
+
+        public Task<Stream> DownloadAsync(string rootPath, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Download(path));
+ }
+
+        public Task<bool> DeleteAsync(string rootPath, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Delete(path));
+ }
+
+        public Task<bool> ExistsAsync(string rootPath, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Exists(path));
+ }
+
+        public Task<DropboxItemMetadata?> GetMetadataAsync(string rootPath, string path, CancellationToken cancellationToken)
+ {
+ return Task.FromResult(_drive.Get(path)?.ToDropboxFile(path));
+ }
+
+        public async IAsyncEnumerable<DropboxItemMetadata> ListAsync(string rootPath, string? directory, [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ await foreach (var entry in _drive.List(directory, cancellationToken))
+ {
+ yield return entry.ToDropboxFile(entry.Path);
+ }
+ }
+ }
+
+ private class InMemoryDrive
+ {
+        private readonly Dictionary<string, DriveEntry> _entries = new();
+
+ public string Root { get; set; } = string.Empty;
+
+ public DriveEntry Save(string path, Stream content, string? contentType)
+ {
+ using var ms = new MemoryStream();
+ content.CopyTo(ms);
+ var data = ms.ToArray();
+ var entry = new DriveEntry
+ {
+ Content = data,
+ ContentType = contentType ?? "application/octet-stream",
+ Created = System.DateTimeOffset.UtcNow,
+ Updated = System.DateTimeOffset.UtcNow,
+ Path = Normalize(path)
+ };
+
+ _entries[entry.Path] = entry;
+ return entry;
+ }
+
+ public bool Delete(string path)
+ {
+ return _entries.Remove(Normalize(path));
+ }
+
+ public bool Exists(string path)
+ {
+ return _entries.ContainsKey(Normalize(path));
+ }
+
+ public DriveEntry? Get(string path)
+ {
+ return _entries.TryGetValue(Normalize(path), out var entry) ? entry : null;
+ }
+
+ public Stream Download(string path)
+ {
+ var normalized = Normalize(path);
+ if (!_entries.TryGetValue(normalized, out var entry))
+ {
+ throw new FileNotFoundException(path);
+ }
+
+ return new MemoryStream(entry.Content, writable: false);
+ }
+
+        public async IAsyncEnumerable<DriveEntry> List(string? directory, [EnumeratorCancellation] CancellationToken cancellationToken)
+ {
+ var normalized = string.IsNullOrWhiteSpace(directory) ? null : Normalize(directory!);
+ foreach (var entry in _entries.Values)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+ if (normalized == null || entry.Path.StartsWith(normalized))
+ {
+ yield return entry;
+ }
+ }
+
+ await Task.CompletedTask;
+ }
+
+ private string Normalize(string path)
+ {
+ return path.Replace("\\", "/").Trim('/');
+ }
+ }
+
+ internal class DriveEntry
+ {
+ public required string Path { get; set; }
+ public required byte[] Content { get; set; }
+ public required string ContentType { get; set; }
+ public required System.DateTimeOffset Created { get; set; }
+ public required System.DateTimeOffset Updated { get; set; }
+ }
+}
+
+internal static class DriveEntryExtensions
+{
+ public static DriveItem ToDriveItem(this CloudDriveStorageTests.DriveEntry entry, string fullPath)
+ {
+ return new DriveItem
+ {
+ Name = System.IO.Path.GetFileName(fullPath),
+ Size = entry.Content.LongLength,
+ CreatedDateTime = entry.Created,
+ LastModifiedDateTime = entry.Updated
+ };
+ }
+
+ public static File ToGoogleFile(this CloudDriveStorageTests.DriveEntry entry, string fullPath)
+ {
+ return new File
+ {
+ Name = System.IO.Path.GetFileName(fullPath),
+ Size = entry.Content.LongLength,
+ CreatedTimeDateTimeOffset = entry.Created,
+ ModifiedTimeDateTimeOffset = entry.Updated,
+ MimeType = entry.ContentType
+ };
+ }
+
+ public static DropboxItemMetadata ToDropboxFile(this CloudDriveStorageTests.DriveEntry entry, string fullPath)
+ {
+ return new DropboxItemMetadata
+ {
+ Name = System.IO.Path.GetFileName(fullPath),
+ Path = entry.Path,
+ Size = (ulong)entry.Content.LongLength,
+ ClientModified = entry.Created.UtcDateTime,
+ ServerModified = entry.Updated.UtcDateTime
+ };
+ }
+}
diff --git a/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs
new file mode 100644
index 0000000..429d6b2
--- /dev/null
+++ b/Tests/ManagedCode.Storage.Tests/Storages/CloudDrive/GraphOneDriveClientTests.cs
@@ -0,0 +1,339 @@
+using System;
+using System.Collections.Generic;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Text;
+using System.Text.Json;
+using System.Threading;
+using System.Threading.Tasks;
+using Azure;
+using Azure.Core;
+using ManagedCode.Storage.OneDrive.Clients;
+using Microsoft.Graph;
+using Microsoft.Graph.Models;
+using Shouldly;
+using Xunit;
+
+namespace ManagedCode.Storage.Tests.Storages.CloudDrive;
+
+public class GraphOneDriveClientTests
+{
+ private const string RootKey = "root";
+
+ [Fact]
+ public async Task GraphClient_EndToEnd()
+ {
+ var handler = new FakeGraphHandler();
+ var client = CreateGraphClient(handler);
+ var storageClient = new GraphOneDriveClient(client);
+
+ await storageClient.EnsureRootAsync("me", "work", true, CancellationToken.None);
+
+ await using (var uploadStream = new MemoryStream(Encoding.UTF8.GetBytes("graph payload")))
+ {
+ var uploaded = await storageClient.UploadAsync("me", "work/doc.txt", uploadStream, "text/plain", CancellationToken.None);
+ uploaded.Name.ShouldBe("doc.txt");
+ }
+
+ (await storageClient.ExistsAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeTrue();
+
+ var metadata = await storageClient.GetMetadataAsync("me", "work/doc.txt", CancellationToken.None);
+ metadata.ShouldNotBeNull();
+ metadata!.Size.ShouldBe((long)"graph payload".Length);
+
+ await using (var downloaded = await storageClient.DownloadAsync("me", "work/doc.txt", CancellationToken.None))
+ using (var reader = new StreamReader(downloaded))
+ {
+ (await reader.ReadToEndAsync()).ShouldBe("graph payload");
+ }
+
+        var listed = new List<DriveItem>();
+ await foreach (var item in storageClient.ListAsync("me", "work", CancellationToken.None))
+ {
+ listed.Add(item);
+ }
+
+ listed.ShouldContain(i => i.Name == "doc.txt");
+
+ (await storageClient.DeleteAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeTrue();
+ (await storageClient.ExistsAsync("me", "work/doc.txt", CancellationToken.None)).ShouldBeFalse();
+ }
+
+ private static GraphServiceClient CreateGraphClient(HttpMessageHandler handler)
+ {
+ var scopes = new[] { "https://graph.microsoft.com/.default" };
+ var credential = new FakeTokenCredential();
+ var httpClient = new HttpClient(handler)
+ {
+ BaseAddress = new Uri("https://graph.microsoft.com/v1.0")
+ };
+
+ return new GraphServiceClient(httpClient, credential, scopes, "https://graph.microsoft.com/v1.0");
+ }
+
+ private sealed class FakeTokenCredential : TokenCredential
+ {
+ public override AccessToken GetToken(TokenRequestContext requestContext, CancellationToken cancellationToken)
+ {
+ return new AccessToken("test-token", DateTimeOffset.UtcNow.AddHours(1));
+ }
+
+        public override ValueTask<AccessToken> GetTokenAsync(TokenRequestContext requestContext, CancellationToken cancellationToken)
+        {
+            return new ValueTask<AccessToken>(GetToken(requestContext, cancellationToken));
+        }
+ }
+
+ private sealed class FakeGraphHandler : HttpMessageHandler
+ {
+        private readonly Dictionary<string, GraphEntry> _entries = new(StringComparer.OrdinalIgnoreCase)
+ {
+ [RootKey] = GraphEntry.Root()
+ };
+
+        protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
+ {
+ cancellationToken.ThrowIfCancellationRequested();
+
+ if (IsRootRequest(request.RequestUri))
+ {
+ return Task.FromResult(JsonResponse(_entries[RootKey]));
+ }
+
+ if (TryHandleChildrenRequest(request, out var childrenResponse))
+ {
+ return Task.FromResult(childrenResponse);
+ }
+
+ if (TryHandleItemRequest(request, out var itemResponse))
+ {
+ return Task.FromResult(itemResponse);
+ }
+
+ return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound));
+ }
+
+ private bool TryHandleItemRequest(HttpRequestMessage request, out HttpResponseMessage response)
+ {
+ response = new HttpResponseMessage(HttpStatusCode.NotFound);
+ var segments = request.RequestUri!.AbsolutePath.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
+ var contentSegment = segments.FirstOrDefault(s => s.EndsWith(":content", StringComparison.OrdinalIgnoreCase));
+
+ if (contentSegment != null)
+ {
+ var path = DecodePath(contentSegment.Replace(":content", string.Empty, StringComparison.OrdinalIgnoreCase));
+ return HandleContentRequest(request, path, ref response);
+ }
+
+ var itemWithPath = segments.FirstOrDefault(s => s.Contains(':'));
+ if (itemWithPath != null)
+ {
+ var path = DecodePath(itemWithPath.Trim(':'));
+ return HandleMetadataRequest(request.Method, path, ref response);
+ }
+
+ return false;
+ }
+
+ private bool HandleMetadataRequest(HttpMethod method, string path, ref HttpResponseMessage response)
+ {
+ var entry = _entries.Values.FirstOrDefault(v => string.Equals(v.Path, path, StringComparison.OrdinalIgnoreCase));
+ if (method == HttpMethod.Delete)
+ {
+ if (entry == null)
+ {
+ response = new HttpResponseMessage(HttpStatusCode.NotFound);
+ return true;
+ }
+
+ _entries.Remove(entry.Id);
+ response = new HttpResponseMessage(HttpStatusCode.NoContent);
+ return true;
+ }
+
+ if (entry == null)
+ {
+ response = new HttpResponseMessage(HttpStatusCode.NotFound);
+ return true;
+ }
+
+ response = JsonResponse(entry);
+ return true;
+ }
+
+ private bool HandleContentRequest(HttpRequestMessage request, string path, ref HttpResponseMessage response)
+ {
+ if (request.Method == HttpMethod.Put)
+ {
+ var parentPath = Path.GetDirectoryName(path)?.Replace("\\", "/").Trim('/') ?? string.Empty;
+ EnsureFolder(parentPath);
+
+ var buffer = request.Content!.ReadAsStream();
+ using var memory = new MemoryStream();
+ buffer.CopyTo(memory);
+ var entry = GraphEntry.File(Path.GetFileName(path), parentPath, memory.ToArray());
+ _entries[entry.Id] = entry;
+ response = JsonResponse(entry);
+ return true;
+ }
+
+ var existing = _entries.Values.FirstOrDefault(v => string.Equals(v.Path, path, StringComparison.OrdinalIgnoreCase));
+ if (existing == null)
+ {
+ response = new HttpResponseMessage(HttpStatusCode.NotFound);
+ return true;
+ }
+
+ response = new HttpResponseMessage(HttpStatusCode.OK)
+ {
+ Content = new ByteArrayContent(existing.Content)
+ };
+
+ return true;
+ }
+
+ private bool TryHandleChildrenRequest(HttpRequestMessage request, out HttpResponseMessage response)
+ {
+ response = new HttpResponseMessage(HttpStatusCode.NotFound);
+ var path = request.RequestUri!.AbsolutePath;
+ if (!path.EndsWith("/children", StringComparison.OrdinalIgnoreCase))
+ {
+ return false;
+ }
+
+ var idSegment = path.Contains("items", StringComparison.OrdinalIgnoreCase)
+ ? path.Split('/', StringSplitOptions.RemoveEmptyEntries).SkipWhile(s => !s.Equals("items", StringComparison.OrdinalIgnoreCase)).Skip(1).FirstOrDefault()
+ : RootKey;
+
+ if (request.Method == HttpMethod.Post)
+ {
+ var body = request.Content!.ReadAsStringAsync().GetAwaiter().GetResult();
+                var item = JsonSerializer.Deserialize<DriveItem>(body, new JsonSerializerOptions
+ {
+ PropertyNameCaseInsensitive = true
+ });
+
+ var created = GraphEntry.Folder(item!.Name!, parentPath: _entries[idSegment ?? RootKey].Path);
+ _entries[created.Id] = created;
+ response = JsonResponse(created, HttpStatusCode.Created);
+ return true;
+ }
+
+ var children = _entries.Values.Where(e => string.Equals(e.ParentPath, _entries[idSegment ?? RootKey].Path, StringComparison.OrdinalIgnoreCase)).ToList();
+ response = JsonResponse(new DriveItemCollectionResponse
+ {
+ Value = children.Select(GraphEntry.ToDriveItem).ToList()
+ });
+
+ return true;
+ }
+
+ private static bool IsRootRequest(Uri? requestUri)
+ {
+ return requestUri != null && requestUri.AbsolutePath.TrimEnd('/').EndsWith("me/drive/root", StringComparison.OrdinalIgnoreCase);
+ }
+
+ private void EnsureFolder(string path)
+ {
+ var normalized = path.Trim('/');
+ if (string.IsNullOrWhiteSpace(normalized))
+ {
+ return;
+ }
+
+ if (_entries.Values.Any(e => string.Equals(e.Path, normalized, StringComparison.OrdinalIgnoreCase)))
+ {
+ return;
+ }
+
+ var parentPath = Path.GetDirectoryName(normalized)?.Replace("\\", "/").Trim('/') ?? string.Empty;
+ EnsureFolder(parentPath);
+
+ var folder = GraphEntry.Folder(Path.GetFileName(normalized), parentPath);
+ _entries[folder.Id] = folder;
+ }
+
+ private static string DecodePath(string segment)
+ {
+ return Uri.UnescapeDataString(segment.Replace("root:", string.Empty, StringComparison.OrdinalIgnoreCase)).Trim('/');
+ }
+
+ private static HttpResponseMessage JsonResponse(object content, HttpStatusCode status = HttpStatusCode.OK)
+ {
+ var response = new HttpResponseMessage(status)
+ {
+ Content = new StringContent(JsonSerializer.Serialize(content))
+ };
+
+ response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json");
+ return response;
+ }
+ }
+
+ private sealed class GraphEntry
+ {
+ public required string Id { get; init; }
+ public required string Name { get; init; }
+ public required string Path { get; init; }
+ public required string ParentPath { get; init; }
+        public byte[] Content { get; init; } = Array.Empty<byte>();
+ public bool IsFolder { get; init; }
+
+ public static GraphEntry Root()
+ {
+ return new GraphEntry
+ {
+ Id = RootKey,
+ Name = "root",
+ Path = string.Empty,
+ ParentPath = string.Empty,
+ IsFolder = true
+ };
+ }
+
+ public static GraphEntry Folder(string name, string parentPath)
+ {
+ var normalizedParent = parentPath.Trim('/');
+ var path = string.IsNullOrWhiteSpace(normalizedParent) ? name : $"{normalizedParent}/{name}";
+ return new GraphEntry
+ {
+ Id = Guid.NewGuid().ToString("N"),
+ Name = name,
+ Path = path,
+ ParentPath = normalizedParent,
+ IsFolder = true
+ };
+ }
+
+ public static GraphEntry File(string name, string parentPath, byte[] content)
+ {
+ var normalizedParent = parentPath.Trim('/');
+ var path = string.IsNullOrWhiteSpace(normalizedParent) ? name : $"{normalizedParent}/{name}";
+ return new GraphEntry
+ {
+ Id = Guid.NewGuid().ToString("N"),
+ Name = name,
+ Path = path,
+ ParentPath = normalizedParent,
+ Content = content,
+ IsFolder = false
+ };
+ }
+
+ public static DriveItem ToDriveItem(GraphEntry entry)
+ {
+ return new DriveItem
+ {
+ Id = entry.Id,
+ Name = entry.Name,
+ Size = entry.Content.LongLength,
+ CreatedDateTime = DateTimeOffset.UtcNow,
+ LastModifiedDateTime = DateTimeOffset.UtcNow,
+ File = entry.IsFolder ? null : new Microsoft.Graph.Models.FileObject(),
+ Folder = entry.IsFolder ? new Folder() : null
+ };
+ }
+ }
+}
diff --git a/dotnet-install.sh b/dotnet-install.sh
new file mode 100755
index 0000000..0e19528
--- /dev/null
+++ b/dotnet-install.sh
@@ -0,0 +1,1888 @@
+#!/usr/bin/env bash
+# Copyright (c) .NET Foundation and contributors. All rights reserved.
+# Licensed under the MIT license. See LICENSE file in the project root for full license information.
+#
+
+# Stop script on NZEC
+set -e
+# Stop script if unbound variable found (use ${var:-} if intentional)
+set -u
+# By default cmd1 | cmd2 returns exit code of cmd2 regardless of cmd1 success
+# This is causing it to fail
+set -o pipefail
+
+# Use in the the functions: eval $invocation
+invocation='say_verbose "Calling: ${yellow:-}${FUNCNAME[0]} ${green:-}$*${normal:-}"'
+
+# standard output may be used as a return value in the functions
+# we need a way to write text on the screen in the functions so that
+# it won't interfere with the return value.
+# Exposing stream 3 as a pipe to standard output of the script itself
+exec 3>&1
+
+# Setup some colors to use. These need to work in fairly limited shells, like the Ubuntu Docker container where there are only 8 colors.
+# See if stdout is a terminal
+if [ -t 1 ] && command -v tput > /dev/null; then
+ # see if it supports colors
+ ncolors=$(tput colors || echo 0)
+ if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
+ bold="$(tput bold || echo)"
+ normal="$(tput sgr0 || echo)"
+ black="$(tput setaf 0 || echo)"
+ red="$(tput setaf 1 || echo)"
+ green="$(tput setaf 2 || echo)"
+ yellow="$(tput setaf 3 || echo)"
+ blue="$(tput setaf 4 || echo)"
+ magenta="$(tput setaf 5 || echo)"
+ cyan="$(tput setaf 6 || echo)"
+ white="$(tput setaf 7 || echo)"
+ fi
+fi
+
+say_warning() {
+ printf "%b\n" "${yellow:-}dotnet_install: Warning: $1${normal:-}" >&3
+}
+
+say_err() {
+ printf "%b\n" "${red:-}dotnet_install: Error: $1${normal:-}" >&2
+}
+
+say() {
+ # using stream 3 (defined in the beginning) to not interfere with stdout of functions
+ # which may be used as return value
+ printf "%b\n" "${cyan:-}dotnet-install:${normal:-} $1" >&3
+}
+
+say_verbose() {
+ if [ "$verbose" = true ]; then
+ say "$1"
+ fi
+}
+
+# This platform list is finite - if the SDK/Runtime has supported Linux distribution-specific assets,
+# then and only then should the Linux distribution appear in this list.
+# Adding a Linux distribution to this list does not imply distribution-specific support.
+get_legacy_os_name_from_platform() {
+ eval $invocation
+
+ platform="$1"
+ case "$platform" in
+ "centos.7")
+ echo "centos"
+ return 0
+ ;;
+ "debian.8")
+ echo "debian"
+ return 0
+ ;;
+ "debian.9")
+ echo "debian.9"
+ return 0
+ ;;
+ "fedora.23")
+ echo "fedora.23"
+ return 0
+ ;;
+ "fedora.24")
+ echo "fedora.24"
+ return 0
+ ;;
+ "fedora.27")
+ echo "fedora.27"
+ return 0
+ ;;
+ "fedora.28")
+ echo "fedora.28"
+ return 0
+ ;;
+ "opensuse.13.2")
+ echo "opensuse.13.2"
+ return 0
+ ;;
+ "opensuse.42.1")
+ echo "opensuse.42.1"
+ return 0
+ ;;
+ "opensuse.42.3")
+ echo "opensuse.42.3"
+ return 0
+ ;;
+ "rhel.7"*)
+ echo "rhel"
+ return 0
+ ;;
+ "ubuntu.14.04")
+ echo "ubuntu"
+ return 0
+ ;;
+ "ubuntu.16.04")
+ echo "ubuntu.16.04"
+ return 0
+ ;;
+ "ubuntu.16.10")
+ echo "ubuntu.16.10"
+ return 0
+ ;;
+ "ubuntu.18.04")
+ echo "ubuntu.18.04"
+ return 0
+ ;;
+ "alpine.3.4.3")
+ echo "alpine"
+ return 0
+ ;;
+ esac
+ return 1
+}
+
+get_legacy_os_name() {
+ eval $invocation
+
+ local uname=$(uname)
+ if [ "$uname" = "Darwin" ]; then
+ echo "osx"
+ return 0
+ elif [ -n "$runtime_id" ]; then
+ echo $(get_legacy_os_name_from_platform "${runtime_id%-*}" || echo "${runtime_id%-*}")
+ return 0
+ else
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ os=$(get_legacy_os_name_from_platform "$ID${VERSION_ID:+.${VERSION_ID}}" || echo "")
+ if [ -n "$os" ]; then
+ echo "$os"
+ return 0
+ fi
+ fi
+ fi
+
+ say_verbose "Distribution specific OS name and version could not be detected: UName = $uname"
+ return 1
+}
+
+get_linux_platform_name() {
+ eval $invocation
+
+ if [ -n "$runtime_id" ]; then
+ echo "${runtime_id%-*}"
+ return 0
+ else
+ if [ -e /etc/os-release ]; then
+ . /etc/os-release
+ echo "$ID${VERSION_ID:+.${VERSION_ID}}"
+ return 0
+ elif [ -e /etc/redhat-release ]; then
+            local redhatRelease=$(</etc/redhat-release)
+            if [[ $redhatRelease == "CentOS release 6."* || $redhatRelease == "Red Hat Enterprise Linux Server release 6."* ]]; then
+                echo "rhel.6"
+                return 0
+            fi
+        fi
+    fi
+
+    say_verbose "Linux specific platform name and version could not be detected: UName = $uname"
+    return 1
+}
+
+is_musl_based_distro() {
+    (ldd --version 2>&1 || true) | grep -q musl
+}
+
+get_current_os_name() {
+ eval $invocation
+
+ local uname=$(uname)
+ if [ "$uname" = "Darwin" ]; then
+ echo "osx"
+ return 0
+ elif [ "$uname" = "FreeBSD" ]; then
+ echo "freebsd"
+ return 0
+ elif [ "$uname" = "Linux" ]; then
+ local linux_platform_name=""
+ linux_platform_name="$(get_linux_platform_name)" || true
+
+ if [ "$linux_platform_name" = "rhel.6" ]; then
+ echo $linux_platform_name
+ return 0
+ elif is_musl_based_distro; then
+ echo "linux-musl"
+ return 0
+ elif [ "$linux_platform_name" = "linux-musl" ]; then
+ echo "linux-musl"
+ return 0
+ else
+ echo "linux"
+ return 0
+ fi
+ fi
+
+ say_err "OS name could not be detected: UName = $uname"
+ return 1
+}
+
+machine_has() {
+ eval $invocation
+
+ command -v "$1" > /dev/null 2>&1
+ return $?
+}
+
+check_min_reqs() {
+ local hasMinimum=false
+ if machine_has "curl"; then
+ hasMinimum=true
+ elif machine_has "wget"; then
+ hasMinimum=true
+ fi
+
+ if [ "$hasMinimum" = "false" ]; then
+ say_err "curl (recommended) or wget are required to download dotnet. Install missing prerequisite to proceed."
+ return 1
+ fi
+ return 0
+}
+
+# args:
+# input - $1
+to_lowercase() {
+ #eval $invocation
+
+ echo "$1" | tr '[:upper:]' '[:lower:]'
+ return 0
+}
+
+# args:
+# input - $1
+remove_trailing_slash() {
+ #eval $invocation
+
+ local input="${1:-}"
+ echo "${input%/}"
+ return 0
+}
+
+# args:
+# input - $1
+# Prints $1 with at most one leading "/" removed; empty input yields empty output.
+remove_beginning_slash() {
+    #eval $invocation
+
+    local input="${1:-}"
+    echo "${input#/}"
+    return 0
+}
+
+# args:
+# root_path - $1
+# child_path - $2 - this parameter can be empty
+# Joins exactly two path segments with a single "/", normalizing the boundary
+# slashes on either side. The child segment may be empty (result ends in "/").
+combine_paths() {
+    eval $invocation
+
+    # TODO: Consider making it work with any number of paths. For now:
+    if [ ! -z "${3:-}" ]; then
+        say_err "combine_paths: Function takes two parameters."
+        return 1
+    fi
+
+    local root_path="$(remove_trailing_slash "$1")"
+    local child_path="$(remove_beginning_slash "${2:-}")"
+    say_verbose "combine_paths: root_path=$root_path"
+    say_verbose "combine_paths: child_path=$child_path"
+    echo "$root_path/$child_path"
+    return 0
+}
+
+# Prints the host CPU architecture normalized to .NET RID architecture names
+# (x64, arm, arm64, s390x, ppc64le, loongarch64, riscv64, ppc, armv6-or-below).
+# Unknown or undetectable machines default to "x64".
+get_machine_architecture() {
+    eval $invocation
+
+    if command -v uname > /dev/null; then
+        CPUName=$(uname -m)
+        case $CPUName in
+        armv1*|armv2*|armv3*|armv4*|armv5*|armv6*)
+            # Too old for supported .NET arm payloads; callers reject this value.
+            echo "armv6-or-below"
+            return 0
+            ;;
+        armv*l)
+            echo "arm"
+            return 0
+            ;;
+        aarch64|arm64)
+            if [ "$(getconf LONG_BIT)" -lt 64 ]; then
+                # This is 32-bit OS running on 64-bit CPU (for example Raspberry Pi OS)
+                echo "arm"
+                return 0
+            fi
+            echo "arm64"
+            return 0
+            ;;
+        s390x)
+            echo "s390x"
+            return 0
+            ;;
+        ppc64le)
+            echo "ppc64le"
+            return 0
+            ;;
+        loongarch64)
+            echo "loongarch64"
+            return 0
+            ;;
+        riscv64)
+            echo "riscv64"
+            return 0
+            ;;
+        powerpc|ppc)
+            echo "ppc"
+            return 0
+            ;;
+        esac
+    fi
+
+    # Always default to 'x64'
+    echo "x64"
+    return 0
+}
+
+# args:
+# architecture - $1
+# args:
+#   architecture - $1
+# Normalizes a user-supplied architecture string to the values used in
+# download URLs; the sentinel "<auto>" resolves to the host architecture.
+# Fails for unsupported values.
+get_normalized_architecture_from_architecture() {
+    eval $invocation
+
+    local architecture="$(to_lowercase "$1")"
+
+    # FIX(review): the pattern here was corrupted to "\ " in the patch; the
+    # "not specified" sentinel used throughout this script is the literal
+    # string "<auto>", which must be escaped inside [[ ]] pattern matching.
+    if [[ $architecture == \<auto\> ]]; then
+        machine_architecture="$(get_machine_architecture)"
+        if [[ "$machine_architecture" == "armv6-or-below" ]]; then
+            say_err "Architecture \`$machine_architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues"
+            return 1
+        fi
+
+        echo "$machine_architecture"
+        return 0
+    fi
+
+    case "$architecture" in
+        amd64|x64)
+            echo "x64"
+            return 0
+            ;;
+        arm)
+            echo "arm"
+            return 0
+            ;;
+        arm64)
+            echo "arm64"
+            return 0
+            ;;
+        s390x)
+            echo "s390x"
+            return 0
+            ;;
+        ppc64le)
+            echo "ppc64le"
+            return 0
+            ;;
+        loongarch64)
+            echo "loongarch64"
+            return 0
+            ;;
+    esac
+
+    say_err "Architecture \`$architecture\` not supported. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues"
+    return 1
+}
+
+# args:
+# version - $1
+# channel - $2
+# architecture - $3
+# Downgrades arm64 to x64 on macOS when the requested SDK version/channel
+# predates arm64 support (< 6.0) and Rosetta is available; fails when Rosetta
+# is not running. All other inputs pass through unchanged.
+get_normalized_architecture_for_specific_sdk_version() {
+    eval $invocation
+
+    local is_version_support_arm64="$(is_arm64_supported "$1")"
+    local is_channel_support_arm64="$(is_arm64_supported "$2")"
+    local architecture="$3";
+    local osname="$(get_current_os_name)"
+
+    if [ "$osname" == "osx" ] && [ "$architecture" == "arm64" ] && { [ "$is_version_support_arm64" = false ] || [ "$is_channel_support_arm64" = false ]; }; then
+        #check if rosetta is installed
+        # oahd is the Rosetta translation daemon; a running process means x64
+        # emulation is available.
+        if [ "$(/usr/bin/pgrep oahd >/dev/null 2>&1;echo $?)" -eq 0 ]; then
+            say_verbose "Changing user architecture from '$architecture' to 'x64' because .NET SDKs prior to version 6.0 do not support arm64."
+            echo "x64"
+            return 0;
+        else
+            # NOTE(review): "$version" here is a script-global, not a local of
+            # this function — confirm it is set by the argument parser.
+            say_err "Architecture \`$architecture\` is not supported for .NET SDK version \`$version\`. Please install Rosetta to allow emulation of the \`$architecture\` .NET SDK on this platform"
+            return 1
+        fi
+    fi
+
+    echo "$architecture"
+    return 0
+}
+
+# args:
+# version or channel - $1
+# Prints "false" when $1 (a version or channel string) has a numeric major
+# part below 6 — .NET SDKs before 6.0 have no arm64 payloads — and "true"
+# otherwise (including non-numeric inputs like "LTS").
+is_arm64_supported() {
+    # Extract the major version by splitting on the dot
+    major_version="${1%%.*}"
+
+    # Check if the major version is a valid number and less than 6
+    # NOTE(review): the [0-9]* glob also matches values like "6abc"; the
+    # subsequent -lt would then error. Presumed acceptable for the inputs this
+    # script feeds in (version strings and channels).
+    case "$major_version" in
+        [0-9]*)
+            if [ "$major_version" -lt 6 ]; then
+                echo false
+                return 0
+            fi
+            ;;
+    esac
+
+    echo true
+    return 0
+}
+
+# args:
+# user_defined_os - $1
+# Normalizes the user's --os value ("macos" becomes "osx"); with no value,
+# auto-detects via get_current_os_name. Fails on unsupported values.
+get_normalized_os() {
+    eval $invocation
+
+    local osname="$(to_lowercase "$1")"
+    if [ ! -z "$osname" ]; then
+        case "$osname" in
+            osx | freebsd | rhel.6 | linux-musl | linux)
+                echo "$osname"
+                return 0
+                ;;
+            macos)
+                osname='osx'
+                echo "$osname"
+                return 0
+                ;;
+            *)
+                # NOTE(review): "$user_defined_os" is a script-global (the raw
+                # --os argument), not defined in this function — confirm the
+                # argument parser sets it.
+                say_err "'$user_defined_os' is not a supported value for --os option, supported values are: osx, macos, linux, linux-musl, freebsd, rhel.6. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
+                return 1
+                ;;
+        esac
+    else
+        osname="$(get_current_os_name)" || return 1
+    fi
+    # Only reached on the auto-detect path; every case branch returns above.
+    echo "$osname"
+    return 0
+}
+
+# args:
+# quality - $1
+# Normalizes the --quality value. "daily"/"preview" are echoed as-is; "ga" is
+# normalized to empty (GA builds need no quality segment in the URL); anything
+# else fails. Empty input is accepted and produces no output.
+get_normalized_quality() {
+    eval $invocation
+
+    local quality="$(to_lowercase "$1")"
+    if [ ! -z "$quality" ]; then
+        case "$quality" in
+            daily | preview)
+                echo "$quality"
+                return 0
+                ;;
+            ga)
+                #ga quality is available without specifying quality, so normalizing it to empty
+                return 0
+                ;;
+            *)
+                say_err "'$quality' is not a supported value for --quality option. Supported values are: daily, preview, ga. If you think this is a bug, report it at https://github.com/dotnet/install-scripts/issues."
+                return 1
+                ;;
+        esac
+    fi
+    return 0
+}
+
+# args:
+# channel - $1
+# Normalizes the --channel value: lts/sts/current map to their canonical
+# upper-case forms ("current" is a deprecated alias for "STS"); any other
+# value (e.g. "8.0", "release/..." branches) passes through lower-cased.
+get_normalized_channel() {
+    eval $invocation
+
+    local channel="$(to_lowercase "$1")"
+
+    if [[ $channel == current ]]; then
+        say_warning 'Value "Current" is deprecated for -Channel option. Use "STS" instead.'
+    fi
+
+    if [[ $channel == release/* ]]; then
+        say_warning 'Using branch name with -Channel option is no longer supported with newer releases. Use -Quality option with a channel in X.Y format instead.';
+    fi
+
+    if [ ! -z "$channel" ]; then
+        case "$channel" in
+            lts)
+                echo "LTS"
+                return 0
+                ;;
+            sts)
+                echo "STS"
+                return 0
+                ;;
+            current)
+                echo "STS"
+                return 0
+                ;;
+            *)
+                echo "$channel"
+                return 0
+                ;;
+        esac
+    fi
+
+    # Empty channel: echo nothing, succeed.
+    return 0
+}
+
+# args:
+# runtime - $1
+# Maps the --runtime value to the aka.ms product name: "dotnet" ->
+# "dotnet-runtime", "aspnetcore" -> "aspnetcore-runtime", empty -> "dotnet-sdk".
+# NOTE(review): any other runtime value silently yields an empty product —
+# presumably validated earlier by the argument parser; confirm.
+get_normalized_product() {
+    eval $invocation
+
+    local product=""
+    local runtime="$(to_lowercase "$1")"
+    if [[ "$runtime" == "dotnet" ]]; then
+        product="dotnet-runtime"
+    elif [[ "$runtime" == "aspnetcore" ]]; then
+        product="aspnetcore-runtime"
+    elif [ -z "$runtime" ]; then
+        product="dotnet-sdk"
+    fi
+    echo "$product"
+    return 0
+}
+
+# The version text returned from the feeds is a 1-line or 2-line string:
+# For the SDK and the dotnet runtime (2 lines):
+# Line 1: # commit_hash
+# Line 2: # 4-part version
+# For the aspnetcore runtime (1 line):
+# Line 1: # 4-part version
+
+# args:
+# version_text - stdin
+get_version_from_latestversion_file_content() {
+ eval $invocation
+
+ cat | tail -n 1 | sed 's/\r$//'
+ return 0
+}
+
+# args:
+# install_root - $1
+# relative_path_to_package - $2
+# specific_version - $3
+is_dotnet_package_installed() {
+ eval $invocation
+
+ local install_root="$1"
+ local relative_path_to_package="$2"
+ local specific_version="${3//[$'\t\r\n']}"
+
+ local dotnet_package_path="$(combine_paths "$(combine_paths "$install_root" "$relative_path_to_package")" "$specific_version")"
+ say_verbose "is_dotnet_package_installed: dotnet_package_path=$dotnet_package_path"
+
+ if [ -d "$dotnet_package_path" ]; then
+ return 0
+ else
+ return 1
+ fi
+}
+
+# args:
+# downloaded file - $1
+# remote_file_size - $2
+# args:
+#   downloaded file - $1
+#   remote_file_size - $2
+# Compares the on-disk size of the downloaded file with the remote
+# Content-Length and reports (via say) a possible corruption. Diagnostic only:
+# mismatches are reported, never treated as failure.
+validate_remote_local_file_sizes()
+{
+    eval $invocation
+
+    local downloaded_file="$1"
+    local remote_file_size="$2"
+    local file_size=''
+
+    if [[ "$OSTYPE" == "linux-gnu"* ]]; then
+        file_size="$(stat -c '%s' "$downloaded_file")"
+    elif [[ "$OSTYPE" == "darwin"* ]]; then
+        # hardcode in order to avoid conflicts with GNU stat
+        file_size="$(/usr/bin/stat -f '%z' "$downloaded_file")"
+    fi
+
+    if [ -n "$file_size" ]; then
+        say "Downloaded file size is $file_size bytes."
+
+        if [ -n "$remote_file_size" ] && [ -n "$file_size" ]; then
+            if [ "$remote_file_size" -ne "$file_size" ]; then
+                say "The remote and local file sizes are not equal. The remote file size is $remote_file_size bytes and the local size is $file_size bytes. The local package may be corrupted."
+            else
+                say "The remote and local file sizes are equal."
+            fi
+        fi
+
+    else
+        # Other OSTYPEs (e.g. FreeBSD) leave file_size empty: nothing to compare.
+        say "Either downloaded or local package size can not be measured. One of them may be corrupted."
+    fi
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# args:
+#   azure_feed - $1
+#   channel - $2
+#   normalized_architecture - $3
+# Downloads the latest.version file for the product selected by the global
+# $runtime and prints its content to stdout. Returns download's failure code
+# on error. NOTE(review): $3 is accepted but unused; some callers also pass a
+# 4th argument that is ignored — confirm against upstream call sites.
+get_version_from_latestversion_file() {
+    eval $invocation
+
+    local azure_feed="$1"
+    local channel="$2"
+    local normalized_architecture="$3"
+
+    local version_file_url=null
+    if [[ "$runtime" == "dotnet" ]]; then
+        version_file_url="$azure_feed/Runtime/$channel/latest.version"
+    elif [[ "$runtime" == "aspnetcore" ]]; then
+        version_file_url="$azure_feed/aspnetcore/Runtime/$channel/latest.version"
+    elif [ -z "$runtime" ]; then
+        version_file_url="$azure_feed/Sdk/$channel/latest.version"
+    else
+        say_err "Invalid value for \$runtime"
+        return 1
+    fi
+    say_verbose "get_version_from_latestversion_file: latest url: $version_file_url"
+
+    download "$version_file_url" || return $?
+    return 0
+}
+
+# args:
+# json_file - $1
+# args:
+#   json_file - $1
+# Extracts sdk.version from a global.json without a JSON parser: awk slices
+# the "sdk" object, then the key/value pairs are split on commas and colons.
+# Prints the version; fails when the file or the sdk/version node is missing.
+parse_globaljson_file_for_version() {
+    eval $invocation
+
+    local json_file="$1"
+    if [ ! -f "$json_file" ]; then
+        say_err "Unable to find \`$json_file\`"
+        return 1
+    fi
+
+    # Grab everything from the "sdk" key to the next closing brace (CRs removed).
+    sdk_section=$(cat $json_file | tr -d "\r" | awk '/"sdk"/,/}/')
+    if [ -z "$sdk_section" ]; then
+        say_err "Unable to parse the SDK node in \`$json_file\`"
+        return 1
+    fi
+
+    # Keep the object body, strip quotes/spaces, one key:value pair per line.
+    sdk_list=$(echo $sdk_section | awk -F"[{}]" '{print $2}')
+    sdk_list=${sdk_list//[\" ]/}
+    sdk_list=${sdk_list//,/$'\n'}
+
+    local version_info=""
+    while read -r line; do
+        # Split each pair on ":" ; IFS is global here and reset after the loop.
+        IFS=:
+        while read -r key value; do
+            if [[ "$key" == "version" ]]; then
+                version_info=$value
+            fi
+        done <<< "$line"
+    done <<< "$sdk_list"
+    if [ -z "$version_info" ]; then
+        say_err "Unable to find the SDK:version node in \`$json_file\`"
+        return 1
+    fi
+
+    unset IFS;
+    echo "$version_info"
+    return 0
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# version - $4
+# json_file - $5
+# args:
+#   azure_feed - $1
+#   channel - $2
+#   normalized_architecture - $3
+#   version - $4
+#   json_file - $5
+# Resolves the version to install: from global.json when provided, from the
+# feed's latest.version file when version is "latest", otherwise echoes the
+# requested version (lower-cased) unchanged.
+get_specific_version_from_version() {
+    eval $invocation
+
+    local azure_feed="$1"
+    local channel="$2"
+    local normalized_architecture="$3"
+    local version="$(to_lowercase "$4")"
+    local json_file="$5"
+
+    if [ -z "$json_file" ]; then
+        if [[ "$version" == "latest" ]]; then
+            local version_info
+            version_info="$(get_version_from_latestversion_file "$azure_feed" "$channel" "$normalized_architecture" false)" || return 1
+            say_verbose "get_specific_version_from_version: version_info=$version_info"
+            # The file content may be 1 or 2 lines; the version is the last line.
+            echo "$version_info" | get_version_from_latestversion_file_content
+            return 0
+        else
+            echo "$version"
+            return 0
+        fi
+    else
+        # global.json takes precedence over any explicit version argument.
+        local version_info
+        version_info="$(parse_globaljson_file_for_version "$json_file")" || return 1
+        echo "$version_info"
+        return 0
+    fi
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# specific_version - $4
+# normalized_os - $5
+# args:
+#   azure_feed - $1
+#   channel - $2
+#   normalized_architecture - $3
+#   specific_version - $4
+#   normalized_os - $5
+# Builds the primary tar.gz download URL for the product selected by the
+# global $runtime. The URL path uses specific_version while the file name uses
+# the (possibly different) product version from productVersion.txt.
+construct_download_link() {
+    eval $invocation
+
+    local azure_feed="$1"
+    local channel="$2"
+    local normalized_architecture="$3"
+    # Strip stray tabs/CR/LF from version text obtained over the network.
+    local specific_version="${4//[$'\t\r\n']}"
+    local specific_product_version="$(get_specific_product_version "$1" "$4")"
+    local osname="$5"
+
+    local download_link=null
+    if [[ "$runtime" == "dotnet" ]]; then
+        download_link="$azure_feed/Runtime/$specific_version/dotnet-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+    elif [[ "$runtime" == "aspnetcore" ]]; then
+        download_link="$azure_feed/aspnetcore/Runtime/$specific_version/aspnetcore-runtime-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+    elif [ -z "$runtime" ]; then
+        download_link="$azure_feed/Sdk/$specific_version/dotnet-sdk-$specific_product_version-$osname-$normalized_architecture.tar.gz"
+    else
+        return 1
+    fi
+
+    echo "$download_link"
+    return 0
+}
+
+# args:
+# azure_feed - $1
+# specific_version - $2
+# download link - $3 (optional)
+# args:
+#   azure_feed - $1
+#   specific_version - $2
+#   download link - $3 (optional)
+get_specific_product_version() {
+    # If we find a 'productVersion.txt' at the root of any folder, we'll use its contents
+    # to resolve the version of what's in the folder, superseding the specified version.
+    # if 'productVersion.txt' is missing but download link is already available, product version will be taken from download link
+    eval $invocation
+
+    local azure_feed="$1"
+    local specific_version="${2//[$'\t\r\n']}"
+    local package_download_link=""
+    if [ $# -gt 2 ]; then
+        local package_download_link="$3"
+    fi
+    local specific_product_version=null
+
+    # Try to get the version number, using the productVersion.txt file located next to the installer file.
+    # Two candidate URLs: the flattened (<product>-productVersion.txt) name first,
+    # then the plain productVersion.txt.
+    local download_links=($(get_specific_product_version_url "$azure_feed" "$specific_version" true "$package_download_link")
+    $(get_specific_product_version_url "$azure_feed" "$specific_version" false "$package_download_link"))
+
+    for download_link in "${download_links[@]}"
+    do
+        say_verbose "Checking for the existence of $download_link"
+
+        if machine_has "curl"
+        then
+            # $feed_credential is a script-global query-string suffix (may be empty).
+            if ! specific_product_version=$(curl -s --fail "${download_link}${feed_credential}" 2>&1); then
+                continue
+            else
+                # Strip tabs/CR/LF before echoing the resolved version.
+                echo "${specific_product_version//[$'\t\r\n']}"
+                return 0
+            fi
+
+        elif machine_has "wget"
+        then
+            specific_product_version=$(wget -qO- "${download_link}${feed_credential}" 2>&1)
+            if [ $? = 0 ]; then
+                echo "${specific_product_version//[$'\t\r\n']}"
+                return 0
+            fi
+        fi
+    done
+
+    # Getting the version number with productVersion.txt has failed. Try parsing the download link for a version number.
+    say_verbose "Failed to get the version using productVersion.txt file. Download link will be parsed instead."
+    specific_product_version="$(get_product_specific_version_from_download_link "$package_download_link" "$specific_version")"
+    echo "${specific_product_version//[$'\t\r\n']}"
+    return 0
+}
+
+# args:
+# azure_feed - $1
+# specific_version - $2
+# is_flattened - $3
+# download link - $4 (optional)
+# args:
+#   azure_feed - $1
+#   specific_version - $2
+#   is_flattened - $3
+#   download link - $4 (optional)
+# Builds the URL of the productVersion.txt file that sits next to the package.
+# "Flattened" feeds prefix the file name with the product (e.g.
+# "sdk-productVersion.txt"). When a package download link is supplied, the
+# file is looked up in that link's directory instead of the feed layout.
+get_specific_product_version_url() {
+    eval $invocation
+
+    local azure_feed="$1"
+    local specific_version="$2"
+    local is_flattened="$3"
+    local package_download_link=""
+    if [ $# -gt 3 ]; then
+        local package_download_link="$4"
+    fi
+
+    local pvFileName="productVersion.txt"
+    if [ "$is_flattened" = true ]; then
+        if [ -z "$runtime" ]; then
+            pvFileName="sdk-productVersion.txt"
+        elif [[ "$runtime" == "dotnet" ]]; then
+            pvFileName="runtime-productVersion.txt"
+        else
+            pvFileName="$runtime-productVersion.txt"
+        fi
+    fi
+
+    local download_link=null
+
+    if [ -z "$package_download_link" ]; then
+        if [[ "$runtime" == "dotnet" ]]; then
+            download_link="$azure_feed/Runtime/$specific_version/${pvFileName}"
+        elif [[ "$runtime" == "aspnetcore" ]]; then
+            download_link="$azure_feed/aspnetcore/Runtime/$specific_version/${pvFileName}"
+        elif [ -z "$runtime" ]; then
+            download_link="$azure_feed/Sdk/$specific_version/${pvFileName}"
+        else
+            return 1
+        fi
+    else
+        # Replace the final path segment of the package link with the pv file name.
+        download_link="${package_download_link%/*}/${pvFileName}"
+    fi
+
+    say_verbose "Constructed productVersion link: $download_link"
+    echo "$download_link"
+    return 0
+}
+
+# args:
+# download link - $1
+# specific version - $2
+# args:
+#   download link - $1
+#   specific version - $2
+# Falls back to parsing the product version out of the download URL's file
+# name; when that fails (or no link is given), echoes the specific version.
+get_product_specific_version_from_download_link()
+{
+    eval $invocation
+
+    local download_link="$1"
+    local specific_version="$2"
+    local specific_product_version=""
+
+    if [ -z "$download_link" ]; then
+        echo "$specific_version"
+        return 0
+    fi
+
+    #get filename
+    filename="${download_link##*/}"
+
+    #product specific version follows the product name
+    #for filename 'dotnet-sdk-3.1.404-linux-x64.tar.gz': the product version is 3.1.404
+    # NOTE: IFS is changed globally here and restored with unset below.
+    IFS='-'
+    read -ra filename_elems <<< "$filename"
+    count=${#filename_elems[@]}
+    if [[ "$count" -gt 2 ]]; then
+        specific_product_version="${filename_elems[2]}"
+    else
+        specific_product_version=$specific_version
+    fi
+    unset IFS;
+    echo "$specific_product_version"
+    return 0
+}
+
+# args:
+# azure_feed - $1
+# channel - $2
+# normalized_architecture - $3
+# specific_version - $4
+# args:
+#   azure_feed - $1
+#   channel - $2
+#   normalized_architecture - $3
+#   specific_version - $4
+# Builds the pre-2.0-era download URL, which embeds a distro-specific OS name
+# (from get_legacy_os_name). Only the dotnet runtime and the SDK have legacy
+# layouts; other $runtime values fail.
+construct_legacy_download_link() {
+    eval $invocation
+
+    local azure_feed="$1"
+    local channel="$2"
+    local normalized_architecture="$3"
+    local specific_version="${4//[$'\t\r\n']}"
+
+    local distro_specific_osname
+    distro_specific_osname="$(get_legacy_os_name)" || return 1
+
+    local legacy_download_link=null
+    if [[ "$runtime" == "dotnet" ]]; then
+        legacy_download_link="$azure_feed/Runtime/$specific_version/dotnet-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+    elif [ -z "$runtime" ]; then
+        legacy_download_link="$azure_feed/Sdk/$specific_version/dotnet-dev-$distro_specific_osname-$normalized_architecture.$specific_version.tar.gz"
+    else
+        return 1
+    fi
+
+    echo "$legacy_download_link"
+    return 0
+}
+
+# Prints the default install location: $DOTNET_INSTALL_DIR when set and
+# non-empty, otherwise ~/.dotnet.
+get_user_install_path() {
+    eval $invocation
+
+    if [ -n "${DOTNET_INSTALL_DIR:-}" ]; then
+        echo "$DOTNET_INSTALL_DIR"
+    else
+        echo "$HOME/.dotnet"
+    fi
+    return 0
+}
+
+# args:
+# install_dir - $1
+resolve_installation_path() {
+ eval $invocation
+
+ local install_dir=$1
+ if [ "$install_dir" = "" ]; then
+ local user_install_path="$(get_user_install_path)"
+ say_verbose "resolve_installation_path: user_install_path=$user_install_path"
+ echo "$user_install_path"
+ return 0
+ fi
+
+ echo "$install_dir"
+ return 0
+}
+
+# args:
+# relative_or_absolute_path - $1
+# args:
+#   relative_or_absolute_path - $1
+# Prints an absolute, symlink-resolved (-P) path for $1. The parent directory
+# must exist (cd would fail otherwise); the leaf need not.
+get_absolute_path() {
+    eval $invocation
+
+    local relative_or_absolute_path=$1
+    echo "$(cd "$(dirname "$1")" && pwd -P)/$(basename "$1")"
+    return 0
+}
+
+# args:
+# override - $1 (boolean, true or false)
+# args:
+#   override - $1 (boolean, true or false)
+# Prints the cp flag to use for non-overriding copies: "-u" when the local cp
+# supports it (probed with a scratch copy), otherwise "-n"; empty string when
+# overriding is requested.
+get_cp_options() {
+    eval $invocation
+
+    local override="$1"
+    local override_switch=""
+
+    if [ "$override" = false ]; then
+        override_switch="-n"
+
+        # create temporary files to check if 'cp -u' is supported
+        tmp_dir="$(mktemp -d)"
+        tmp_file="$tmp_dir/testfile"
+        tmp_file2="$tmp_dir/testfile2"
+
+        touch "$tmp_file"
+
+        # use -u instead of -n if it's available
+        if cp -u "$tmp_file" "$tmp_file2" 2>/dev/null; then
+            override_switch="-u"
+        fi
+
+        # clean up
+        rm -f "$tmp_file" "$tmp_file2"
+        rm -rf "$tmp_dir"
+    fi
+
+    echo "$override_switch"
+}
+
+# args:
+# input_files - stdin
+# root_path - $1
+# out_path - $2
+# override - $3
+# args:
+#   input_files - stdin
+#   root_path - $1
+#   out_path - $2
+#   override - $3
+# Copies each path read from stdin (expressed under root_path) to the same
+# relative location under out_path. With override=false, existing targets are
+# left untouched (cp -n/-u); with override=true, directories are replaced.
+copy_files_or_dirs_from_list() {
+    eval $invocation
+
+    local root_path="$(remove_trailing_slash "$1")"
+    local out_path="$(remove_trailing_slash "$2")"
+    local override="$3"
+    local override_switch="$(get_cp_options "$override")"
+
+    cat | uniq | while read -r file_path; do
+        # Rebase the incoming path from root_path onto out_path.
+        local path="$(remove_beginning_slash "${file_path#$root_path}")"
+        local target="$out_path/$path"
+        if [ "$override" = true ] || (! ([ -d "$target" ] || [ -e "$target" ])); then
+            mkdir -p "$out_path/$(dirname "$path")"
+            if [ -d "$target" ]; then
+                rm -rf "$target"
+            fi
+            # $override_switch is deliberately unquoted: it may be empty.
+            cp -R $override_switch "$root_path/$path" "$target"
+        fi
+    done
+}
+
+# args:
+# zip_uri - $1
+# args:
+#   zip_uri - $1
+# Prints the remote file's Content-Length in bytes (via a curl/wget HEAD
+# request), or an empty string when the header cannot be obtained. Best-effort
+# only; never fails the caller.
+get_remote_file_size() {
+    local zip_uri="$1"
+
+    if machine_has "curl"; then
+        # "$2 + 0" in awk coerces the header value to a plain number.
+        file_size=$(curl -sI "$zip_uri" | grep -i content-length | awk '{ num = $2 + 0; print num }')
+    elif machine_has "wget"; then
+        file_size=$(wget --spider --server-response -O /dev/null "$zip_uri" 2>&1 | grep -i 'Content-Length:' | awk '{ num = $2 + 0; print num }')
+    else
+        say "Neither curl nor wget is available on this system."
+        return
+    fi
+
+    if [ -n "$file_size" ]; then
+        say "Remote file $zip_uri size is $file_size bytes."
+        echo "$file_size"
+    else
+        say_verbose "Content-Length header was not extracted for $zip_uri."
+        echo ""
+    fi
+}
+
+# args:
+# zip_path - $1
+# out_path - $2
+# remote_file_size - $3
+# args:
+#   zip_path - $1
+#   out_path - $2
+#   remote_file_size - $3
+# Extracts the downloaded tar.gz into a temp dir, then copies versioned
+# folders (never overriding existing versions) and non-versioned files
+# (override controlled by the global $override_non_versioned_files) into
+# out_path. Also sanity-checks the archive size and removes the archive
+# unless the global $keep_zip is set.
+extract_dotnet_package() {
+    eval $invocation
+
+    local zip_path="$1"
+    local out_path="$2"
+    local remote_file_size="$3"
+
+    # $temporary_file_template is a script-global mktemp template.
+    local temp_out_path="$(mktemp -d "$temporary_file_template")"
+
+    local failed=false
+    tar -xzf "$zip_path" -C "$temp_out_path" > /dev/null || failed=true
+
+    # Paths containing a version-numbered directory (e.g. .../6.0.1/...).
+    local folders_with_version_regex='^.*/[0-9]+\.[0-9]+[^/]+/'
+    find "$temp_out_path" -type f | grep -Eo "$folders_with_version_regex" | sort | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" false
+    find "$temp_out_path" -type f | grep -Ev "$folders_with_version_regex" | copy_files_or_dirs_from_list "$temp_out_path" "$out_path" "$override_non_versioned_files"
+
+    validate_remote_local_file_sizes "$zip_path" "$remote_file_size"
+
+    rm -rf "$temp_out_path"
+    if [ -z ${keep_zip+x} ]; then
+        rm -f "$zip_path" && say_verbose "Temporary archive file $zip_path was removed"
+    fi
+
+    if [ "$failed" = true ]; then
+        say_err "Extraction failed"
+        return 1
+    fi
+    return 0
+}
+
+# args:
+# remote_path - $1
+# disable_feed_credential - $2
+# args:
+#   remote_path - $1
+#   disable_feed_credential - $2
+# Prints the HTTP response headers for remote_path using whichever downloader
+# is available. disable_feed_credential=true suppresses appending the feed
+# credential query string (used for aka.ms redirects).
+get_http_header()
+{
+    eval $invocation
+    local remote_path="$1"
+    local disable_feed_credential="$2"
+
+    local failed=false
+    local response
+    if machine_has "curl"; then
+        get_http_header_curl $remote_path $disable_feed_credential || failed=true
+    elif machine_has "wget"; then
+        get_http_header_wget $remote_path $disable_feed_credential || failed=true
+    else
+        failed=true
+    fi
+    if [ "$failed" = true ]; then
+        say_verbose "Failed to get HTTP header: '$remote_path'."
+        return 1
+    fi
+    return 0
+}
+
+# args:
+# remote_path - $1
+# disable_feed_credential - $2
+# args:
+#   remote_path - $1
+#   disable_feed_credential - $2
+# curl backend for get_http_header: HEAD request (-I) with retries, printing
+# response headers to stdout. The feed credential is appended only when not
+# disabled, and as late as possible so it is never logged.
+get_http_header_curl() {
+    eval $invocation
+    local remote_path="$1"
+    local disable_feed_credential="$2"
+
+    remote_path_with_credential="$remote_path"
+    if [ "$disable_feed_credential" = false ]; then
+        remote_path_with_credential+="$feed_credential"
+    fi
+
+    curl_options="-I -sSL --retry 5 --retry-delay 2 --connect-timeout 15 "
+    curl $curl_options "$remote_path_with_credential" 2>&1 || return 1
+    return 0
+}
+
+# args:
+# remote_path - $1
+# disable_feed_credential - $2
+# args:
+#   remote_path - $1
+#   disable_feed_credential - $2
+# wget backend for get_http_header: spider mode with server responses (-S)
+# printed to stdout (via 2>&1). Extra options are probed because BusyBox wget
+# lacks --waitretry/--connect-timeout.
+get_http_header_wget() {
+    eval $invocation
+    local remote_path="$1"
+    local disable_feed_credential="$2"
+    local wget_options="-q -S --spider --tries 5 "
+
+    local wget_options_extra=''
+
+    # Test for options that aren't supported on all wget implementations.
+    if [[ $(wget -h 2>&1 | grep -E 'waitretry|connect-timeout') ]]; then
+        wget_options_extra="--waitretry 2 --connect-timeout 15 "
+    else
+        say "wget extra options are unavailable for this environment"
+    fi
+
+    # Credential appended last so it never appears in verbose traces.
+    remote_path_with_credential="$remote_path"
+    if [ "$disable_feed_credential" = false ]; then
+        remote_path_with_credential+="$feed_credential"
+    fi
+
+    wget $wget_options $wget_options_extra "$remote_path_with_credential" 2>&1
+
+    return $?
+}
+
+# args:
+# remote_path - $1
+# [out_path] - $2 - stdout if not provided
+# args:
+#   remote_path - $1
+#   [out_path] - $2 - stdout if not provided
+# Downloads remote_path to out_path (or stdout) with up to 3 attempts and
+# backoff, via curl or wget. Non-http(s) paths are treated as local files and
+# copied. A 404 aborts retries immediately ($http_code is set by the
+# downloadcurl/downloadwget helpers).
+download() {
+    eval $invocation
+
+    local remote_path="$1"
+    local out_path="${2:-}"
+
+    if [[ "$remote_path" != "http"* ]]; then
+        cp "$remote_path" "$out_path"
+        return $?
+    fi
+
+    local failed=false
+    local attempts=0
+    while [ $attempts -lt 3 ]; do
+        attempts=$((attempts+1))
+        failed=false
+        if machine_has "curl"; then
+            downloadcurl "$remote_path" "$out_path" || failed=true
+        elif machine_has "wget"; then
+            downloadwget "$remote_path" "$out_path" || failed=true
+        else
+            say_err "Missing dependency: neither curl nor wget was found."
+            exit 1
+        fi
+
+        # Stop on success, on the final attempt, or on a definitive 404.
+        if [ "$failed" = false ] || [ $attempts -ge 3 ] || { [ -n "${http_code-}" ] && [ "${http_code}" = "404" ]; }; then
+            break
+        fi
+
+        say "Download attempt #$attempts has failed: ${http_code-} ${download_error_msg-}"
+        say "Attempt #$((attempts+1)) will start in $((attempts*10)) seconds."
+        sleep $((attempts*10))
+    done
+
+    if [ "$failed" = true ]; then
+        say_verbose "Download failed: $remote_path"
+        return 1
+    fi
+    return 0
+}
+
+# Updates global variables $http_code and $download_error_msg
+# args:
+#   remote_path - $1
+#   [out_path] - $2 - stdout if not provided
+# curl download backend. Updates global variables $http_code and
+# $download_error_msg for the caller's retry logic.
+downloadcurl() {
+    eval $invocation
+    unset http_code
+    unset download_error_msg
+    local remote_path="$1"
+    local out_path="${2:-}"
+    # Append feed_credential as late as possible before calling curl to avoid logging feed_credential
+    # Avoid passing URI with credentials to functions: note, most of them echoing parameters of invocation in verbose output.
+    local remote_path_with_credential="${remote_path}${feed_credential}"
+    local curl_options="--retry 20 --retry-delay 2 --connect-timeout 15 -sSL -f --create-dirs "
+    local curl_exit_code=0;
+    if [ -z "$out_path" ]; then
+        curl_output=$(curl $curl_options "$remote_path_with_credential" 2>&1)
+        curl_exit_code=$?
+        echo "$curl_output"
+    else
+        curl_output=$(curl $curl_options -o "$out_path" "$remote_path_with_credential" 2>&1)
+        curl_exit_code=$?
+    fi
+
+    # Regression in curl causes curl with --retry to return a 0 exit code even when it fails to download a file - https://github.com/curl/curl/issues/17554
+    if [ $curl_exit_code -eq 0 ] && echo "$curl_output" | grep -q "^curl: ([0-9]*) "; then
+        # Recover the real error code from curl's "curl: (NN) ..." stderr text.
+        curl_exit_code=$(echo "$curl_output" | sed 's/curl: (\([0-9]*\)).*/\1/')
+    fi
+
+    if [ $curl_exit_code -gt 0 ]; then
+        download_error_msg="Unable to download $remote_path."
+        # Check for curl timeout codes
+        if [[ $curl_exit_code == 7 || $curl_exit_code == 28 ]]; then
+            download_error_msg+=" Failed to reach the server: connection timeout."
+        else
+            # Follow up with a HEAD request to learn the HTTP status for diagnostics.
+            local disable_feed_credential=false
+            local response=$(get_http_header_curl $remote_path $disable_feed_credential)
+            http_code=$( echo "$response" | awk '/^HTTP/{print $2}' | tail -1 )
+            if [[ ! -z $http_code && $http_code != 2* ]]; then
+                download_error_msg+=" Returned HTTP status code: $http_code."
+            fi
+        fi
+        say_verbose "$download_error_msg"
+        return 1
+    fi
+    return 0
+}
+
+
+# Updates global variables $http_code and $download_error_msg
+# args:
+#   remote_path - $1
+#   [out_path] - $2 - stdout if not provided
+# wget download backend. Updates global variables $http_code and
+# $download_error_msg for the caller's retry logic.
+downloadwget() {
+    eval $invocation
+    unset http_code
+    unset download_error_msg
+    local remote_path="$1"
+    local out_path="${2:-}"
+    # Append feed_credential as late as possible before calling wget to avoid logging feed_credential
+    local remote_path_with_credential="${remote_path}${feed_credential}"
+    local wget_options="--tries 20 "
+
+    local wget_options_extra=''
+    local wget_result=''
+
+    # Test for options that aren't supported on all wget implementations.
+    # (BusyBox wget lacks --waitretry/--connect-timeout.)
+    if [[ $(wget -h 2>&1 | grep -E 'waitretry|connect-timeout') ]]; then
+        wget_options_extra="--waitretry 2 --connect-timeout 15 "
+    else
+        say "wget extra options are unavailable for this environment"
+    fi
+
+    if [ -z "$out_path" ]; then
+        wget -q $wget_options $wget_options_extra -O - "$remote_path_with_credential" 2>&1
+        wget_result=$?
+    else
+        wget $wget_options $wget_options_extra -O "$out_path" "$remote_path_with_credential" 2>&1
+        wget_result=$?
+    fi
+
+    if [[ $wget_result != 0 ]]; then
+        # Follow up with a spider request to learn the HTTP status for diagnostics.
+        local disable_feed_credential=false
+        local response=$(get_http_header_wget $remote_path $disable_feed_credential)
+        http_code=$( echo "$response" | awk '/^  HTTP/{print $2}' | tail -1 )
+        download_error_msg="Unable to download $remote_path."
+        if [[ ! -z $http_code && $http_code != 2* ]]; then
+            download_error_msg+=" Returned HTTP status code: $http_code."
+        # wget exit code 4 stands for network-issue
+        elif [[ $wget_result == 4 ]]; then
+            download_error_msg+=" Failed to reach the server: connection timeout."
+        fi
+        say_verbose "$download_error_msg"
+        return 1
+    fi
+
+    return 0
+}
+
+# Resolves the primary payload URL by following the aka.ms redirect chain.
+# Reads the normalized_* globals, may clear $normalized_quality (quality is
+# unsupported for LTS/STS), and on success sets the global
+# $aka_ms_download_link. Fails when any intermediate hop is not a 301.
+get_download_link_from_aka_ms() {
+    eval $invocation
+
+    #quality is not supported for LTS or STS channel
+    #STS maps to current
+    if [[ ! -z "$normalized_quality"  && ("$normalized_channel" == "LTS" || "$normalized_channel" == "STS") ]]; then
+        normalized_quality=""
+        say_warning "Specifying quality for STS or LTS channel is not supported, the quality will be ignored."
+    fi
+
+    say_verbose "Retrieving primary payload URL from aka.ms for channel: '$normalized_channel', quality: '$normalized_quality', product: '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'."
+
+    #construct aka.ms link
+    aka_ms_link="https://aka.ms/dotnet"
+    if  [ "$internal" = true ]; then
+        aka_ms_link="$aka_ms_link/internal"
+    fi
+    aka_ms_link="$aka_ms_link/$normalized_channel"
+    if [[ ! -z "$normalized_quality" ]]; then
+        aka_ms_link="$aka_ms_link/$normalized_quality"
+    fi
+    aka_ms_link="$aka_ms_link/$normalized_product-$normalized_os-$normalized_architecture.tar.gz"
+    say_verbose "Constructed aka.ms link: '$aka_ms_link'."
+
+    #get HTTP response
+    #do not pass credentials as a part of the $aka_ms_link and do not apply credentials in the get_http_header function
+    #otherwise the redirect link would have credentials as well
+    #it would result in applying credentials twice to the resulting link and thus breaking it, and in echoing credentials to the output as a part of redirect link
+    disable_feed_credential=true
+    response="$(get_http_header $aka_ms_link $disable_feed_credential)"
+
+    say_verbose "Received response: $response"
+    # Get results of all the redirects.
+    http_codes=$( echo "$response" | awk '$1 ~ /^HTTP/ {print $2}' )
+    # They all need to be 301, otherwise some links are broken (except for the last, which is not a redirect but 200 or 404).
+    broken_redirects=$( echo "$http_codes" | sed '$d' | grep -v '301' )
+    # The response may end without final code 2xx/4xx/5xx somehow, e.g. network restrictions on www.bing.com causes redirecting to bing.com fails with connection refused.
+    # In this case it should not exclude the last.
+    last_http_code=$( echo "$http_codes" | tail -n 1 )
+    if ! [[ $last_http_code =~ ^(2|4|5)[0-9][0-9]$ ]]; then
+        broken_redirects=$( echo "$http_codes" | grep -v '301' )
+    fi
+
+    # All HTTP codes are 301 (Moved Permanently), the redirect link exists.
+    if [[ -z "$broken_redirects" ]]; then
+        aka_ms_download_link=$( echo "$response" | awk '$1 ~ /^Location/{print $2}' | tail -1 | tr -d '\r')
+
+        if [[ -z "$aka_ms_download_link" ]]; then
+            say_verbose "The aka.ms link '$aka_ms_link' is not valid: failed to get redirect location."
+            return 1
+        fi
+
+        say_verbose "The redirect location retrieved: '$aka_ms_download_link'."
+        return 0
+    else
+        say_verbose "The aka.ms link '$aka_ms_link' is not valid: received HTTP code: $(echo "$broken_redirects" | paste -sd "," -)."
+        return 1
+    fi
+}
+
+# Populates the global $feeds array. A user-supplied $uncached_feed takes
+# precedence over $azure_feed; either one replaces the default public feeds.
+get_feeds_to_use()
+{
+    # Default public feeds, in priority order.
+    feeds=(
+    "https://builds.dotnet.microsoft.com/dotnet"
+    "https://ci.dot.net/public"
+    )
+
+    if [[ -n "$uncached_feed" ]]; then
+        feeds=("$uncached_feed")
+    elif [[ -n "$azure_feed" ]]; then
+        feeds=("$azure_feed")
+    fi
+}
+
+# THIS FUNCTION MAY EXIT (if the determined version is already installed).
+# THIS FUNCTION MAY EXIT (if the determined version is already installed).
+# Populates the parallel global arrays $download_links, $specific_versions,
+# $effective_versions and $link_types: first via aka.ms, then (only if that
+# produced nothing) via each configured feed. Fails when no link resolves.
+generate_download_links() {
+
+    download_links=()
+    specific_versions=()
+    effective_versions=()
+    link_types=()
+
+    # If generate_akams_links returns false, no fallback to old links. Just terminate.
+    # This function may also 'exit' (if the determined version is already installed).
+    generate_akams_links || return
+
+    # Check other feeds only if we haven't been able to find an aka.ms link.
+    if [[ "${#download_links[@]}" -lt 1 ]]; then
+        for feed in ${feeds[@]}
+        do
+            # generate_regular_links may also 'exit' (if the determined version is already installed).
+            generate_regular_links $feed || return
+        done
+    fi
+
+    if [[ "${#download_links[@]}" -eq 0 ]]; then
+        say_err "Failed to resolve the exact version number."
+        return 1
+    fi
+
+    say_verbose "Generated ${#download_links[@]} links."
+    for link_index in ${!download_links[@]}
+    do
+        say_verbose "Link $link_index: ${link_types[$link_index]}, ${effective_versions[$link_index]}, ${download_links[$link_index]}"
+    done
+}
+
+# THIS FUNCTION MAY EXIT (if the determined version is already installed).
+# THIS FUNCTION MAY EXIT (if the determined version is already installed).
+# Attempts to resolve the download via aka.ms. Only applies when no explicit
+# version/global.json is given. On success appends to the global link arrays;
+# exits early when that version is already installed; fails hard when a
+# quality was requested but could not be located (no legacy fallback exists
+# for quality). Returning without links signals the latest.version fallback.
+generate_akams_links() {
+    local valid_aka_ms_link=true;
+
+    normalized_version="$(to_lowercase "$version")"
+    if [[ "$normalized_version" != "latest" ]] && [ -n "$normalized_quality" ]; then
+        say_err "Quality and Version options are not allowed to be specified simultaneously. See https://learn.microsoft.com/dotnet/core/tools/dotnet-install-script#options for details."
+        return 1
+    fi
+
+    if [[ -n "$json_file" || "$normalized_version" != "latest" ]]; then
+        # aka.ms links are not needed when exact version is specified via command or json file
+        return
+    fi
+
+    get_download_link_from_aka_ms || valid_aka_ms_link=false
+
+    if [[ "$valid_aka_ms_link" == true ]]; then
+        say_verbose "Retrieved primary payload URL from aka.ms link: '$aka_ms_download_link'."
+        say_verbose "Downloading using legacy url will not be attempted."
+
+        download_link=$aka_ms_download_link
+
+        #get version from the path
+        # The URL's second-to-last path segment is the specific version.
+        IFS='/'
+        read -ra pathElems <<< "$download_link"
+        count=${#pathElems[@]}
+        specific_version="${pathElems[count-2]}"
+        unset IFS;
+        say_verbose "Version: '$specific_version'."
+
+        #Retrieve effective version
+        effective_version="$(get_specific_product_version "$azure_feed" "$specific_version" "$download_link")"
+
+        # Add link info to arrays
+        download_links+=($download_link)
+        specific_versions+=($specific_version)
+        effective_versions+=($effective_version)
+        link_types+=("aka.ms")
+
+        # Check if the SDK version is already installed.
+        if [[ "$dry_run" != true ]] && is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then
+            say "$asset_name with version '$effective_version' is already installed."
+            exit 0
+        fi
+
+        return 0
+    fi
+
+    # if quality is specified - exit with error - there is no fallback approach
+    if [ ! -z "$normalized_quality" ]; then
+        say_err "Failed to locate the latest version in the channel '$normalized_channel' with '$normalized_quality' quality for '$normalized_product', os: '$normalized_os', architecture: '$normalized_architecture'."
+        say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support."
+        return 1
+    fi
+    say_verbose "Falling back to latest.version file approach."
+}
+
+# THIS FUNCTION MAY EXIT (if the determined version is already installed)
+# args:
+#    feed - $1
+# Resolves the specific version available on the given feed and appends the
+# constructed primary (and, when constructible, legacy) download links to the
+# global arrays download_links/specific_versions/effective_versions/link_types.
+# Returns silently without adding links when the version cannot be resolved
+# from this feed.
+generate_regular_links() {
+    local feed="$1"
+    local valid_legacy_download_link=true
+
+    specific_version=$(get_specific_version_from_version "$feed" "$channel" "$normalized_architecture" "$version" "$json_file") || specific_version='0'
+
+    if [[ "$specific_version" == '0' ]]; then
+        say_verbose "Failed to resolve the specific version number using feed '$feed'"
+        return
+    fi
+
+    effective_version="$(get_specific_product_version "$feed" "$specific_version")"
+    say_verbose "specific_version=$specific_version"
+
+    download_link="$(construct_download_link "$feed" "$channel" "$normalized_architecture" "$specific_version" "$normalized_os")"
+    say_verbose "Constructed primary named payload URL: $download_link"
+
+    # Add link info to arrays. The expansions are quoted so each value is
+    # appended as exactly one element (prevents word splitting and glob
+    # expansion on URLs/versions).
+    download_links+=("$download_link")
+    specific_versions+=("$specific_version")
+    effective_versions+=("$effective_version")
+    link_types+=("primary")
+
+    legacy_download_link="$(construct_legacy_download_link "$feed" "$channel" "$normalized_architecture" "$specific_version")" || valid_legacy_download_link=false
+
+    if [ "$valid_legacy_download_link" = true ]; then
+        say_verbose "Constructed legacy named payload URL: $legacy_download_link"
+
+        download_links+=("$legacy_download_link")
+        specific_versions+=("$specific_version")
+        effective_versions+=("$effective_version")
+        link_types+=("legacy")
+    else
+        legacy_download_link=""
+        say_verbose "Could not construct a legacy_download_link; omitting..."
+    fi
+
+    # Check if the SDK version is already installed.
+    if [[ "$dry_run" != true ]] && is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then
+        say "$asset_name with version '$effective_version' is already installed."
+        exit 0
+    fi
+}
+
+# Prints every resolved payload URL and a "repeatable invocation" command line
+# that pins the resolved version, so the exact same install can be replayed.
+# Performs no installation.
+print_dry_run() {
+
+    say "Payload URLs:"
+
+    # Show each candidate URL together with the kind of link it came from.
+    for url_index in "${!download_links[@]}"
+    do
+        say "URL #$url_index - ${link_types[$url_index]}: ${download_links[$url_index]}"
+    done
+
+    resolved_version=${specific_versions[0]}
+    repeatable_command="./$script_name --version \"$resolved_version\" --install-dir \"$install_root\" --architecture \"$normalized_architecture\" --os \"$normalized_os\""
+
+    if [ -n "$normalized_quality" ]; then
+        repeatable_command+=" --quality \"$normalized_quality\""
+    fi
+
+    case "$runtime" in
+        dotnet)
+            repeatable_command+=" --runtime \"dotnet\""
+            ;;
+        aspnetcore)
+            repeatable_command+=" --runtime \"aspnetcore\""
+            ;;
+    esac
+
+    repeatable_command+="$non_dynamic_parameters"
+
+    # Never echo the actual credential; keep an empty placeholder so the
+    # command shape stays correct.
+    if [ -n "$feed_credential" ]; then
+        repeatable_command+=" --feed-credential \"\""
+    fi
+
+    say "Repeatable invocation: $repeatable_command"
+}
+
+calculate_vars() {
+ eval $invocation
+
+ script_name=$(basename "$0")
+ normalized_architecture="$(get_normalized_architecture_from_architecture "$architecture")"
+ say_verbose "Normalized architecture: '$normalized_architecture'."
+ normalized_os="$(get_normalized_os "$user_defined_os")"
+ say_verbose "Normalized OS: '$normalized_os'."
+ normalized_quality="$(get_normalized_quality "$quality")"
+ say_verbose "Normalized quality: '$normalized_quality'."
+ normalized_channel="$(get_normalized_channel "$channel")"
+ say_verbose "Normalized channel: '$normalized_channel'."
+ normalized_product="$(get_normalized_product "$runtime")"
+ say_verbose "Normalized product: '$normalized_product'."
+ install_root="$(resolve_installation_path "$install_dir")"
+ say_verbose "InstallRoot: '$install_root'."
+
+ normalized_architecture="$(get_normalized_architecture_for_specific_sdk_version "$version" "$normalized_channel" "$normalized_architecture")"
+
+ if [[ "$runtime" == "dotnet" ]]; then
+ asset_relative_path="shared/Microsoft.NETCore.App"
+ asset_name=".NET Core Runtime"
+ elif [[ "$runtime" == "aspnetcore" ]]; then
+ asset_relative_path="shared/Microsoft.AspNetCore.App"
+ asset_name="ASP.NET Core Runtime"
+ elif [ -z "$runtime" ]; then
+ asset_relative_path="sdk"
+ asset_name=".NET Core SDK"
+ fi
+
+ get_feeds_to_use
+}
+
+# Downloads the first reachable payload from the resolved download_links,
+# extracts it into install_root, and verifies that the expected version is
+# actually present afterwards. Returns 1 when every link fails to download or
+# when post-install verification fails.
+install_dotnet() {
+    eval $invocation
+    local download_failed=false
+    local download_completed=false
+    local remote_file_size=0
+
+    mkdir -p "$install_root"
+    # Reuse a caller-provided --zip-path if set; otherwise create a temp file.
+    zip_path="${zip_path:-$(mktemp "$temporary_file_template")}"
+    say_verbose "Archive path: $zip_path"
+
+    # Try each candidate link in order until one downloads successfully.
+    for link_index in "${!download_links[@]}"
+    do
+        download_link="${download_links[$link_index]}"
+        specific_version="${specific_versions[$link_index]}"
+        effective_version="${effective_versions[$link_index]}"
+        link_type="${link_types[$link_index]}"
+
+        say "Attempting to download using $link_type link $download_link"
+
+        # The download function will set variables $http_code and $download_error_msg in case of failure.
+        download_failed=false
+        download "$download_link" "$zip_path" 2>&1 || download_failed=true
+
+        if [ "$download_failed" = true ]; then
+            case ${http_code-} in
+            404)
+                say "The resource at $link_type link '$download_link' is not available."
+                ;;
+            *)
+                say "Failed to download $link_type link '$download_link': ${http_code-} ${download_error_msg-}"
+                ;;
+            esac
+            # Drop any partial archive so the next attempt starts clean.
+            rm -f "$zip_path" 2>&1 && say_verbose "Temporary archive file $zip_path was removed"
+        else
+            download_completed=true
+            break
+        fi
+    done
+
+    if [[ "$download_completed" == false ]]; then
+        say_err "Could not find \`$asset_name\` with version = $specific_version"
+        say_err "Refer to: https://aka.ms/dotnet-os-lifecycle for information on .NET Core support"
+        return 1
+    fi
+
+    remote_file_size="$(get_remote_file_size "$download_link")"
+
+    say "Extracting archive from $download_link"
+    extract_dotnet_package "$zip_path" "$install_root" "$remote_file_size" || return 1
+
+    # Check if the SDK version is installed; if not, fail the installation.
+    # if the version contains "RTM" or "servicing"; check if a 'release-type' SDK version is installed.
+    if [[ $specific_version == *"rtm"* || $specific_version == *"servicing"* ]]; then
+        # Take everything before the first '-' as the bare release version.
+        IFS='-'
+        read -ra verArr <<< "$specific_version"
+        release_version="${verArr[0]}"
+        unset IFS;
+        say_verbose "Checking installation: version = $release_version"
+        if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$release_version"; then
+            say "Installed version is $effective_version"
+            return 0
+        fi
+    fi
+
+    # Check if the standard SDK version is installed.
+    say_verbose "Checking installation: version = $effective_version"
+    if is_dotnet_package_installed "$install_root" "$asset_relative_path" "$effective_version"; then
+        say "Installed version is $effective_version"
+        return 0
+    fi
+
+    # Version verification failed. More likely something is wrong either with the downloaded content or with the verification algorithm.
+    say_err "Failed to verify the version of installed \`$asset_name\`.\nInstallation source: $download_link.\nInstallation location: $install_root.\nReport the bug at https://github.com/dotnet/install-scripts/issues."
+    say_err "\`$asset_name\` with version = $effective_version failed to install with an error."
+    return 1
+}
+
+# Preserve the original argument vector.
+args=("$@")
+
+local_version_file_relative_path="/.version"
+bin_folder_relative_path=""
+temporary_file_template="${TMPDIR:-/tmp}/dotnet.XXXXXXXXX"
+
+# Defaults for every command-line option; the argument parser overrides these.
+channel="LTS"
+version="Latest"
+json_file=""
+install_dir=""
+architecture=""
+dry_run=false
+no_path=false
+azure_feed=""
+uncached_feed=""
+feed_credential=""
+verbose=false
+runtime=""
+runtime_id=""
+quality=""
+internal=false
+override_non_versioned_files=true
+# Accumulates value-less/internal flags so they can be echoed back verbatim
+# in the repeatable command line.
+non_dynamic_parameters=""
+user_defined_os=""
+# Parse command-line arguments. Both GNU-style (--channel) and PowerShell-style
+# (-Channel) spellings are accepted for each option.
+while [ $# -ne 0 ]
+do
+    name="$1"
+    case "$name" in
+        -c|--channel|-[Cc]hannel)
+            shift
+            channel="$1"
+            ;;
+        -v|--version|-[Vv]ersion)
+            shift
+            version="$1"
+            ;;
+        -q|--quality|-[Qq]uality)
+            shift
+            quality="$1"
+            ;;
+        --internal|-[Ii]nternal)
+            internal=true
+            non_dynamic_parameters+=" $name"
+            ;;
+        -i|--install-dir|-[Ii]nstall[Dd]ir)
+            shift
+            install_dir="$1"
+            ;;
+        --arch|--architecture|-[Aa]rch|-[Aa]rchitecture)
+            shift
+            architecture="$1"
+            ;;
+        --os|-[Oo][SS])
+            shift
+            user_defined_os="$1"
+            ;;
+        --shared-runtime|-[Ss]hared[Rr]untime)
+            say_warning "The --shared-runtime flag is obsolete and may be removed in a future version of this script. The recommended usage is to specify '--runtime dotnet'."
+            if [ -z "$runtime" ]; then
+                runtime="dotnet"
+            fi
+            ;;
+        --runtime|-[Rr]untime)
+            shift
+            runtime="$1"
+            # Only 'dotnet' and 'aspnetcore' are valid on non-Windows platforms.
+            if [[ "$runtime" != "dotnet" ]] && [[ "$runtime" != "aspnetcore" ]]; then
+                say_err "Unsupported value for --runtime: '$1'. Valid values are 'dotnet' and 'aspnetcore'."
+                if [[ "$runtime" == "windowsdesktop" ]]; then
+                    say_err "WindowsDesktop archives are manufactured for Windows platforms only."
+                fi
+                exit 1
+            fi
+            ;;
+        --dry-run|-[Dd]ry[Rr]un)
+            dry_run=true
+            ;;
+        --no-path|-[Nn]o[Pp]ath)
+            no_path=true
+            non_dynamic_parameters+=" $name"
+            ;;
+        --verbose|-[Vv]erbose)
+            verbose=true
+            non_dynamic_parameters+=" $name"
+            ;;
+        --azure-feed|-[Aa]zure[Ff]eed)
+            shift
+            azure_feed="$1"
+            non_dynamic_parameters+=" $name "\""$1"\"""
+            ;;
+        --uncached-feed|-[Uu]ncached[Ff]eed)
+            shift
+            uncached_feed="$1"
+            non_dynamic_parameters+=" $name "\""$1"\"""
+            ;;
+        --feed-credential|-[Ff]eed[Cc]redential)
+            shift
+            feed_credential="$1"
+            #feed_credential should start with "?", for it to be added to the end of the link.
+            #adding "?" at the beginning of the feed_credential if needed.
+            [[ -z "$(echo $feed_credential)" ]] || [[ $feed_credential == \?* ]] || feed_credential="?$feed_credential"
+            ;;
+        --runtime-id|-[Rr]untime[Ii]d)
+            shift
+            runtime_id="$1"
+            non_dynamic_parameters+=" $name "\""$1"\"""
+            say_warning "Use of --runtime-id is obsolete and should be limited to the versions below 2.1. To override architecture, use --architecture option instead. To override OS, use --os option instead."
+            ;;
+        --jsonfile|-[Jj][Ss]on[Ff]ile)
+            shift
+            json_file="$1"
+            ;;
+        --skip-non-versioned-files|-[Ss]kip[Nn]on[Vv]ersioned[Ff]iles)
+            override_non_versioned_files=false
+            non_dynamic_parameters+=" $name"
+            ;;
+        --keep-zip|-[Kk]eep[Zz]ip)
+            keep_zip=true
+            non_dynamic_parameters+=" $name"
+            ;;
+        --zip-path|-[Zz]ip[Pp]ath)
+            shift
+            zip_path="$1"
+            ;;
+        -?|--?|-h|--help|-[Hh]elp)
+            # Print usage and exit without installing anything.
+            script_name="dotnet-install.sh"
+            echo ".NET Tools Installer"
+            echo "Usage:"
+            echo "       # Install a .NET SDK of a given Quality from a given Channel"
+            echo "       $script_name [-c|--channel ] [-q|--quality ]"
+            echo "       # Install a .NET SDK of a specific public version"
+            echo "       $script_name [-v|--version ]"
+            echo "       $script_name -h|-?|--help"
+            echo ""
+            echo "$script_name is a simple command line interface for obtaining dotnet cli."
+            echo "    Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
+            echo "    - The SDK needs to be installed without user interaction and without admin rights."
+            echo "    - The SDK installation doesn't need to persist across multiple CI runs."
+            echo "    To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer."
+            echo ""
+            echo "Options:"
+            echo "  -c,--channel <CHANNEL>         Download from the channel specified, Defaults to \`$channel\`."
+            echo "      -Channel"
+            echo "          Possible values:"
+            echo "          - STS - the most recent Standard Term Support release"
+            echo "          - LTS - the most recent Long Term Support release"
+            echo "          - 2-part version in a format A.B - represents a specific release"
+            echo "              examples: 2.0; 1.0"
+            echo "          - 3-part version in a format A.B.Cxx - represents a specific SDK release"
+            echo "              examples: 5.0.1xx, 5.0.2xx."
+            echo "              Supported since 5.0 release"
+            echo "          Warning: Value 'Current' is deprecated for the Channel parameter. Use 'STS' instead."
+            echo "          Note: The version parameter overrides the channel parameter when any version other than 'latest' is used."
+            echo "  -v,--version <VERSION>         Use specific VERSION, Defaults to \`$version\`."
+            echo "      -Version"
+            echo "          Possible values:"
+            echo "          - latest - the latest build on specific channel"
+            echo "          - 3-part version in a format A.B.C - represents specific version of build"
+            echo "              examples: 2.0.0-preview2-006120; 1.1.0"
+            echo "  -q,--quality <quality>         Download the latest build of specified quality in the channel."
+            echo "      -Quality"
+            echo "          The possible values are: daily, preview, GA."
+            echo "          Works only in combination with channel. Not applicable for STS and LTS channels and will be ignored if those channels are used."
+            echo "          For SDK use channel in A.B.Cxx format. Using quality for SDK together with channel in A.B format is not supported."
+            echo "          Supported since 5.0 release."
+            echo "          Note: The version parameter overrides the channel parameter when any version other than 'latest' is used, and therefore overrides the quality."
+            echo "  --internal,-Internal               Download internal builds. Requires providing credentials via --feed-credential parameter."
+            echo "  --feed-credential <FEEDCREDENTIAL> Token to access Azure feed. Used as a query string to append to the Azure feed."
+            echo "      -FeedCredential                This parameter typically is not specified."
+            echo "  -i,--install-dir <DIR>             Install under specified location (see Install Location below)"
+            echo "      -InstallDir"
+            echo "  --architecture <ARCHITECTURE>      Architecture of dotnet binaries to be installed, Defaults to \`$architecture\`."
+            echo "      --arch,-Architecture,-Arch"
+            echo "          Possible values: x64, arm, arm64, s390x, ppc64le and loongarch64"
+            echo "  --os <system>                      Specifies operating system to be used when selecting the installer."
+            echo "          Overrides the OS determination approach used by the script. Supported values: osx, linux, linux-musl, freebsd, rhel.6."
+            echo "          In case any other value is provided, the platform will be determined by the script based on machine configuration."
+            echo "          Not supported for legacy links. Use --runtime-id to specify platform for legacy links."
+            echo "          Refer to: https://aka.ms/dotnet-os-lifecycle for more information."
+            echo "  --runtime <RUNTIME>                Installs a shared runtime only, without the SDK."
+            echo "      -Runtime"
+            echo "          Possible values:"
+            echo "          - dotnet     - the Microsoft.NETCore.App shared runtime"
+            echo "          - aspnetcore - the Microsoft.AspNetCore.App shared runtime"
+            echo "  --dry-run,-DryRun                  Do not perform installation. Display download link."
+            echo "  --no-path, -NoPath                 Do not set PATH for the current process."
+            echo "  --verbose,-Verbose                 Display diagnostics information."
+            echo "  --azure-feed,-AzureFeed            For internal use only."
+            echo "                                     Allows using a different storage to download SDK archives from."
+            echo "  --uncached-feed,-UncachedFeed      For internal use only."
+            echo "                                     Allows using a different storage to download SDK archives from."
+            echo "  --skip-non-versioned-files         Skips non-versioned files if they already exist, such as the dotnet executable."
+            echo "      -SkipNonVersionedFiles"
+            echo "  --jsonfile <JSONFILE>              Determines the SDK version from a user specified global.json file."
+            echo "                                     Note: global.json must have a value for 'SDK:Version'"
+            echo "  --keep-zip,-KeepZip                If set, downloaded file is kept."
+            echo "  --zip-path, -ZipPath               If set, downloaded file is stored at the specified path."
+            echo "  -?,--?,-h,--help,-Help             Shows this help message"
+            echo ""
+            echo "Install Location:"
+            echo "  Location is chosen in following order:"
+            echo "    - --install-dir option"
+            echo "    - Environmental variable DOTNET_INSTALL_DIR"
+            echo "    - $HOME/.dotnet"
+            exit 0
+            ;;
+        *)
+            say_err "Unknown argument \`$name\`"
+            exit 1
+            ;;
+    esac
+
+    shift
+done
+
+say_verbose "Note that the intended use of this script is for Continuous Integration (CI) scenarios, where:"
+say_verbose "- The SDK needs to be installed without user interaction and without admin rights."
+say_verbose "- The SDK installation doesn't need to persist across multiple CI runs."
+say_verbose "To set up a development environment or to run apps, use installers rather than this script. Visit https://dotnet.microsoft.com/download to get the installer.\n"
+
+# Internal builds require a credential; in dry-run mode this is downgraded to
+# a warning so the resolved links can still be inspected.
+if [ "$internal" = true ] && [ -z "$(echo $feed_credential)" ]; then
+    message="Provide credentials via --feed-credential parameter."
+    if [ "$dry_run" = true ]; then
+        say_warning "$message"
+    else
+        say_err "$message"
+        exit 1
+    fi
+fi
+
+check_min_reqs
+calculate_vars
+# generate_download_links (via its generate_* helpers) will 'exit' if the determined version is already installed.
+generate_download_links
+
+if [[ "$dry_run" = true ]]; then
+    print_dry_run
+    exit 0
+fi
+
+install_dotnet
+
+# Expose the installed binaries on PATH for the current process (visible only
+# when the script is sourced), or just report where they live.
+bin_path="$(get_absolute_path "$(combine_paths "$install_root" "$bin_folder_relative_path")")"
+if [ "$no_path" = false ]; then
+    say "Adding to current process PATH: \`$bin_path\`. Note: This change will be visible only when sourcing script."
+    export PATH="$bin_path":"$PATH"
+else
+    say "Binaries of dotnet can be found in $bin_path"
+fi
+
+say "Note that the script does not resolve dependencies during installation."
+say "To check the list of dependencies, go to https://learn.microsoft.com/dotnet/core/install, select your operating system and check the \"Dependencies\" section."
+say "Installation finished successfully."