Following the announcement of the Data Management OSS (Object Storage Service) migration to the Direct-to-S3 approach, we would like to share some information to make this transition smoother. This time we look at a .NET utility for the new binary transfer in the Autodesk Forge services. These samples are built with LTS versions of .NET.
The team is also working on new SDKs that use the Direct-to-S3 approach.
Our teammate Joao Martins put together a curated utility file that covers all the newly released endpoints for the OSS Direct-to-S3 approach.
You can find the GitHub repository here; within it, the .NET 6 branch is here, and if you work with .NET Core 3.1, you can find that branch here.
BinarytransferClient.cs
using Autodesk.Forge;
using Newtonsoft.Json;
using RestSharp;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using System.Web;
namespace Forge_Upload_DirectToS3
{
public static class BinarytransferClient
{
public static string BASE_URL { get; set; }
public static dynamic CREDENTIAL { get; set; }
public static int UPLOAD_CHUNK_SIZE { get; set; }
public static string CLIENT_ID { get; set; }
public static string CLIENT_SECRET { get; set; }
public static int MAX_RETRY { get; set; }
/// <summary>
/// Return the URLs to upload the file
/// </summary>
/// <param name="bucketKey">Bucket key</param>
/// <param name="objectKey">Object key</param>
/// <param name="parts">[parts=1] How many URLs to generate in case of multi-part upload</param>
/// <param name="firstPart">B[firstPart=1] Index of the part the first returned URL should point to</param>
/// <param name="uploadKey">[uploadKey] Optional upload key if this is a continuation of a previously initiated upload</param>
/// <param name="minutesExpiration">[minutesExpiration] Custom expiration for the upload URLs (within the 1 to 60 minutes range). If not specified, default is 2 minutes.
public static async Task<dynamic> getUploadUrls(string bucketKey, string objectKey, int? minutesExpiration, int parts = 1, int firstPart = 1, string uploadKey = null)
{
string endpoint = $"/buckets/{bucketKey}/objects/{HttpUtility.UrlEncode(objectKey)}/signeds3upload";
RestClient client = new RestClient(BASE_URL);
RestRequest request = new RestRequest(endpoint, RestSharp.Method.GET);
request.AddHeader("Authorization", "Bearer " + CREDENTIAL.access_token);
request.AddHeader("Content-Type", "application/json");
request.AddParameter("parts", parts, ParameterType.QueryString);
request.AddParameter("firstPart", firstPart, ParameterType.QueryString);
if (!string.IsNullOrEmpty(uploadKey))
{
request.AddParameter("uploadKey", uploadKey, ParameterType.QueryString);
}
if (minutesExpiration != null)
{
request.AddParameter("minutesExpiration", minutesExpiration, ParameterType.QueryString);
}
var response = await client.ExecuteAsync(request);
//Here we handle 429 for Get Upload URLs
if (response.StatusCode == HttpStatusCode.TooManyRequests)
{
int retryAfter = 0;
int.TryParse(response.Headers.ToList()
.Find(x => x.Name == "Retry-After")
.Value.ToString(), out retryAfter);
// Retry-After is expressed in seconds, so convert it before delaying
await Task.Delay(TimeSpan.FromSeconds(retryAfter));
return await getUploadUrls(bucketKey, objectKey, minutesExpiration, parts, firstPart, uploadKey);
}
return JsonConvert.DeserializeObject(response.Content);
}
/// <summary>
/// Upload the FileStream to specified bucket
/// </summary>
/// <param name="bucketKey">Bucket key</param>
/// <param name="objectKey">Object key</param>
/// <param name="fileStream">FileStream from input file</param>
public static async Task<dynamic> UploadToBucket(string bucketKey, string objectKey, FileStream fileStream)
{
long fileSize = fileStream.Length;
int maxBatches = 25;
int numberOfChunks = (int)Math.Ceiling((double)fileSize / UPLOAD_CHUNK_SIZE); // number of parts needed to cover the whole file
int partsUploaded = 0;
long start = 0;
List<string> uploadUrls = new List<string>();
string uploadKey = null;
using (BinaryReader reader = new BinaryReader(fileStream))
{
while (partsUploaded < numberOfChunks)
{
int attempts = 0;
long end = Math.Min((partsUploaded + 1) * UPLOAD_CHUNK_SIZE, fileSize);
long numberOfBytes = end - start;
byte[] fileBytes = new byte[numberOfBytes];
reader.BaseStream.Seek(start, SeekOrigin.Begin);
int count = reader.Read(fileBytes, 0, (int)numberOfBytes);
while (true)
{
attempts++;
Console.WriteLine($"Uploading part {partsUploaded + 1}, attempt {attempts}");
if (uploadUrls.Count == 0)
{
CREDENTIAL = await Get2LeggedTokenAsync(new Scope[] { Scope.DataRead, Scope.DataWrite, Scope.DataCreate });
dynamic uploadParams = await getUploadUrls(bucketKey, objectKey, null, Math.Min(numberOfChunks - partsUploaded, maxBatches), partsUploaded + 1, uploadKey);
uploadKey = uploadParams.uploadKey;
uploadUrls = uploadParams.urls.ToObject<List<string>>();
}
string currentUrl = uploadUrls[0];
uploadUrls.RemoveAt(0);
try
{
var responseBuffer = await UploadBufferRestSharp(currentUrl, fileBytes);
int statusCode = (int)responseBuffer.StatusCode;
switch (statusCode)
{
case 403:
Console.WriteLine("403, refreshing urls");
uploadUrls = new List<string>();
break;
case int n when (n >= 400):
throw new Exception(responseBuffer.Content);
default:
goto NextChunk;
}
}
catch (Exception ex)
{
Console.WriteLine(ex.Message);
if (attempts == MAX_RETRY)
throw;
}
}
NextChunk:
partsUploaded++;
start = end;
System.Console.WriteLine($"{partsUploaded.ToString()} parts uploaded!");
}
}
var responseUpload = await CompleteUpload(bucketKey, objectKey, uploadKey);
return responseUpload;
}
/// <summary>
/// Upload the specific part through url
/// </summary>
/// <param name="url">URL to upload the specified part</param>
/// <param name="buffer">Buffer array to upload</param>
public static async Task<dynamic> UploadBufferRestSharp(string url, byte[] buffer)
{
RestClient client = new RestClient();
RestRequest request = new RestRequest(url, RestSharp.Method.PUT);
request.AddParameter("", buffer, ParameterType.RequestBody);
var response = await client.ExecuteAsync(request);
return response;
}
/// <summary>
/// Finalizes the upload of a file to OSS.
/// </summary>
/// <param name="bucketKey">Bucket key</param>
/// <param name="objectKey">Object key</param>
/// <param name="uploadKey">[uploadKey] Optional upload key if this is a continuation of a previously initiated upload</param>
public static async Task<dynamic> CompleteUpload(string bucketKey, string objectKey, string uploadKey)
{
string endpoint = $"/buckets/{bucketKey}/objects/{HttpUtility.UrlEncode(objectKey)}/signeds3upload";
RestClient client = new RestClient(BASE_URL);
RestRequest request = new RestRequest(endpoint, Method.POST);
request.AddHeader("Authorization", "Bearer " + CREDENTIAL.access_token);
request.AddHeader("Content-Type", "application/json");
request.AddJsonBody(new { uploadKey = $"{uploadKey}" });
var response = await client.ExecuteAsync(request);
return response;
}
/// <summary>
/// Return the URL to download the file
/// </summary>
/// <param name="bucketKey">Bucket key</param>
/// <param name="objectKey">Object key</param>
/// <param name="minutesExpiration">[minutesExpiration] Custom expiration for the download URL (within the 1 to 60 minutes range). If not specified, default is 2 minutes.</param>
public static async Task<dynamic> getDownloadUrl(string bucketKey, string objectKey, int? minutesExpiration)
{
string endpoint = $"/buckets/{bucketKey}/objects/{HttpUtility.UrlEncode(objectKey)}/signeds3download";
RestClient client = new RestClient(BASE_URL);
RestRequest request = new RestRequest(endpoint, RestSharp.Method.GET);
request.AddHeader("Authorization", "Bearer " + CREDENTIAL.access_token);
request.AddHeader("Content-Type", "application/json");
if (minutesExpiration != null)
{
request.AddParameter("minutesExpiration", minutesExpiration, ParameterType.QueryString);
}
var response = await client.ExecuteAsync(request);
//Here we handle 429 for Get Download URLs
if (response.StatusCode == HttpStatusCode.TooManyRequests)
{
int retryAfter = 0;
int.TryParse(response.Headers.ToList()
.Find(x => x.Name == "Retry-After")
.Value.ToString(), out retryAfter);
// Retry-After is expressed in seconds, so convert it before delaying
await Task.Delay(TimeSpan.FromSeconds(retryAfter));
return await getDownloadUrl(bucketKey, objectKey, minutesExpiration);
}
return JsonConvert.DeserializeObject(response.Content);
}
/// <summary>
/// Download the specific part through url
/// </summary>
/// <param name="url">URL to download the file</param>
public static byte[] DownloadBufferRestSharp(string url)
{
RestClient client = new RestClient();
RestRequest request = new RestRequest(url, RestSharp.Method.GET);
byte[] data = client.DownloadData(request);
return data;
}
/// <summary>
/// Return the byte array of the downloaded content
/// </summary>
/// <param name="bucketKey">Bucket key</param>
/// <param name="objectKey">Object key</param>
/// <param name="minutesExpiration">[minutesExpiration] Custom expiration for the upload URLs (within the 1 to 60 minutes range). If not specified, default is 2 minutes.
public static async Task<byte[]> DownloadFromBucket(string bucketKey, string objectKey, int? minutesExpiration)
{
dynamic downloadParams = await getDownloadUrl(bucketKey, objectKey, minutesExpiration);
if (downloadParams.status != "complete")
{
throw new Exception("File not available for download yet.");
}
byte[] downloadedBuffer = DownloadBufferRestSharp(downloadParams.url.ToString());
return downloadedBuffer;
}
/// <summary>
/// Get the access token from Autodesk
/// </summary>
public static async Task<dynamic> Get2LeggedTokenAsync(Scope[] scopes)
{
TwoLeggedApi oauth = new TwoLeggedApi();
string grantType = "client_credentials";
dynamic bearer = await oauth.AuthenticateAsync(
CLIENT_ID,
CLIENT_SECRET,
grantType,
scopes);
return bearer;
}
}
}
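Before using the utility, its static properties must be initialized. Below is a minimal setup sketch; the base URL is the OSS v2 endpoint used by the routes above, while the chunk size, retry count, and environment variable names are placeholder choices, not values from the original sample.
// Minimal initialization sketch (placeholder values, not from the original sample)
BinarytransferClient.BASE_URL = "https://developer.api.autodesk.com/oss/v2";
BinarytransferClient.CLIENT_ID = Environment.GetEnvironmentVariable("FORGE_CLIENT_ID");
BinarytransferClient.CLIENT_SECRET = Environment.GetEnvironmentVariable("FORGE_CLIENT_SECRET");
BinarytransferClient.UPLOAD_CHUNK_SIZE = 5 * 1024 * 1024; // every part except the last must be at least 5 MB
BinarytransferClient.MAX_RETRY = 5;                       // attempts per part before giving up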
The default expiration of a signed URL is 2 minutes (you can extend it up to 60 minutes with the minutesExpiration parameter).
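For example, requesting a download URL that stays valid for 60 minutes could look like the sketch below (the bucket and object keys are placeholders):
// Request a signed download URL valid for 60 minutes instead of the default 2
dynamic downloadParams = await BinarytransferClient.getDownloadUrl("my-bucket", "my-file.rvt", 60);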
Download
Let's start with the download procedure. Downloading a file directly from AWS S3 with a signed URL is a two-step process. The pseudocode below describes how it works:
- Generate a download URL using the GET buckets/:bucketKey/objects/:objectName/signeds3download endpoint.
- Use the new URL to download the OSS object directly from AWS S3.
  - Consider retrying the download (for example, with an exponential backoff) when the response code is 100-199, 429, or 500-599.
The code for downloading a file from an OSS bucket (receiving the whole file into memory first) looks like this:
using System.IO;
using System.Threading.Tasks;
using Autodesk.Forge;
namespace Forge_Upload_DirectToS3.test
{
public class download_from_bucket
{
public static async Task<dynamic> DownloadFile(string filePath, string bucketKey, string objectKey)
{
BinarytransferClient.CREDENTIAL = await BinarytransferClient.Get2LeggedTokenAsync(new Scope[] { Scope.DataRead, Scope.DataWrite, Scope.DataCreate });
dynamic response = new System.Dynamic.ExpandoObject();
response.Status = "Download started!";
System.Console.WriteLine(response.Status);
byte[] downloadedBuffer = await BinarytransferClient.DownloadFromBucket(bucketKey, objectKey, null);
await File.WriteAllBytesAsync(filePath, downloadedBuffer);
response.Status = "Download Complete!";
return response;
}
}
}
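A call to this helper could look like the following sketch (the local path, bucket key, and object key are placeholders):
// Example call from an async context (placeholder values)
dynamic result = await download_from_bucket.DownloadFile(@"C:\temp\my-file.rvt", "my-bucket", "my-file.rvt");
System.Console.WriteLine(result.Status);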
Upload
Next, the upload procedure. Uploading a file directly to AWS S3 with signed URLs involves the following steps. The pseudocode below describes how it works:
- Calculate the number of parts of the file to upload (a short sketch follows this list).
  - Note: every uploaded part except the last one must be at least 5 MB.
- Generate up to 25 URLs for uploading specific parts of the file using the GET buckets/:bucketKey/objects/:objectKey/signeds3upload?firstPart=<index of first part>&parts=<number of parts> endpoint.
  - Part numbers start with 1.
  - For example, to generate upload URLs for parts 10 through 15, set <index of first part> to 10 and <number of parts> to 6.
  - This endpoint also returns an uploadKey that is used later to request additional URLs or to finalize the upload.
- Upload the remaining parts of the file to their corresponding upload URLs.
  - Consider retrying individual uploads (for example, with an exponential backoff) when the response code is 100-199, 429, or 500-599.
  - If the response code is 403, the upload URLs have expired; go back to step 2.
  - If you have used up all the upload URLs and there are still parts to upload, go back to step 2 and generate more URLs.
- Finalize the upload using the POST buckets/:bucketKey/objects/:objectKey/signeds3upload endpoint, passing the uploadKey obtained in step 2.
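As a quick illustration of the first step, the number of parts can be derived from the file size and the chosen chunk size; the 5 MB figure below is the minimum part size mentioned above, and the file path is a placeholder:
// Number of parts needed to cover the file (every part except the last must be >= 5 MB)
long fileSize = new System.IO.FileInfo(@"C:\temp\my-file.rvt").Length;
int chunkSize = 5 * 1024 * 1024;
int numberOfParts = (int)System.Math.Ceiling((double)fileSize / chunkSize);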
The code for uploading a local file to an OSS bucket (via a FileStream) looks like this:
using System.IO;
using System.Threading.Tasks;
namespace Forge_Upload_DirectToS3.test
{
public class upload_to_bucket
{
public static async Task<dynamic> UploadFile(string filePath, string bucketKey, string objectKey)
{
FileStream fileStream = new FileStream(filePath, FileMode.Open);
var response = await BinarytransferClient.UploadToBucket(bucketKey, objectKey, fileStream);
return response;
}
}
}
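A call to this helper could look like this (placeholder values); note that UploadToBucket acquires its own two-legged token internally:
// Example call from an async context (placeholder values)
var response = await upload_to_bucket.UploadFile(@"C:\temp\my-file.rvt", "my-bucket", "my-file.rvt");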
To upload a file to BIM 360 Docs / ACC, the sample first creates a storage location through the Data Management API, then transfers the binary with the same utility, and finally creates a new item or version in the target folder. The code looks like this:
using Autodesk.Forge;
using Autodesk.Forge.Model;
using System;
using System.Collections.Generic;
using System.IO;
using System.Threading.Tasks;
namespace Forge_Upload_DirectToS3.test
{
public static class upload_to_docs
{
public static async Task<dynamic> UploadFile(string filePath, string projectId, string folderId, string fileName)
{
BinarytransferClient.CREDENTIAL = await BinarytransferClient.Get2LeggedTokenAsync(new Scope[] { Scope.DataRead, Scope.DataWrite, Scope.DataCreate });
FileStream fileStream = new FileStream(filePath, FileMode.Open);
// prepare storage
ProjectsApi projectApi = new ProjectsApi();
projectApi.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
StorageRelationshipsTargetData storageRelData = new StorageRelationshipsTargetData(StorageRelationshipsTargetData.TypeEnum.Folders, folderId);
CreateStorageDataRelationshipsTarget storageTarget = new CreateStorageDataRelationshipsTarget(storageRelData);
CreateStorageDataRelationships storageRel = new CreateStorageDataRelationships(storageTarget);
BaseAttributesExtensionObject attributes = new BaseAttributesExtensionObject(string.Empty, string.Empty, new JsonApiLink(string.Empty), null);
CreateStorageDataAttributes storageAtt = new CreateStorageDataAttributes(fileName, attributes);
CreateStorageData storageData = new CreateStorageData(CreateStorageData.TypeEnum.Objects, storageAtt, storageRel);
CreateStorage storage = new CreateStorage(new JsonApiVersionJsonapi(JsonApiVersionJsonapi.VersionEnum._0), storageData);
dynamic storageCreated = await projectApi.PostStorageAsync(projectId, storage);
string[] storageIdParams = ((string)storageCreated.data.id).Split('/');
string[] bucketKeyParams = storageIdParams[storageIdParams.Length - 2].Split(':');
string bucketKey = bucketKeyParams[bucketKeyParams.Length - 1];
string objectName = storageIdParams[storageIdParams.Length - 1];
// upload the file/object, which will create a new object
ObjectsApi objects = new ObjectsApi();
objects.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
//This is the only difference from the old method
var response = await BinarytransferClient.UploadToBucket(bucketKey, objectName, fileStream);
if ((int)response.StatusCode >= 400)
{
throw new Exception(response.Content);
}
// check if file already exists...
FoldersApi folderApi = new FoldersApi();
folderApi.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
var filesInFolder = await folderApi.GetFolderContentsAsync(projectId, folderId);
string itemId = string.Empty;
foreach (KeyValuePair<string, dynamic> item in new DynamicDictionaryItems(filesInFolder.data))
if (item.Value.attributes.displayName == fileName)
itemId = item.Value.id; // this means a file with same name is already there, so we'll create a new version
// now decide whether create a new item or new version
if (string.IsNullOrWhiteSpace(itemId))
{
// create a new item
BaseAttributesExtensionObject baseAttribute = new BaseAttributesExtensionObject(projectId.StartsWith("a.") ? "items:autodesk.core:File" : "items:autodesk.bim360:File", "1.0");
CreateItemDataAttributes createItemAttributes = new CreateItemDataAttributes(fileName, baseAttribute);
CreateItemDataRelationshipsTipData createItemRelationshipsTipData = new CreateItemDataRelationshipsTipData(CreateItemDataRelationshipsTipData.TypeEnum.Versions, CreateItemDataRelationshipsTipData.IdEnum._1);
CreateItemDataRelationshipsTip createItemRelationshipsTip = new CreateItemDataRelationshipsTip(createItemRelationshipsTipData);
StorageRelationshipsTargetData storageTargetData = new StorageRelationshipsTargetData(StorageRelationshipsTargetData.TypeEnum.Folders, folderId);
CreateStorageDataRelationshipsTarget createStorageRelationshipTarget = new CreateStorageDataRelationshipsTarget(storageTargetData);
CreateItemDataRelationships createItemDataRelationhips = new CreateItemDataRelationships(createItemRelationshipsTip, createStorageRelationshipTarget);
CreateItemData createItemData = new CreateItemData(CreateItemData.TypeEnum.Items, createItemAttributes, createItemDataRelationhips);
BaseAttributesExtensionObject baseAttExtensionObj = new BaseAttributesExtensionObject(projectId.StartsWith("a.") ? "versions:autodesk.core:File" : "versions:autodesk.bim360:File", "1.0");
CreateStorageDataAttributes storageDataAtt = new CreateStorageDataAttributes(fileName, baseAttExtensionObj);
CreateItemRelationshipsStorageData createItemRelationshipsStorageData = new CreateItemRelationshipsStorageData(CreateItemRelationshipsStorageData.TypeEnum.Objects, storageCreated.data.id);
CreateItemRelationshipsStorage createItemRelationshipsStorage = new CreateItemRelationshipsStorage(createItemRelationshipsStorageData);
CreateItemRelationships createItemRelationship = new CreateItemRelationships(createItemRelationshipsStorage);
CreateItemIncluded includedVersion = new CreateItemIncluded(CreateItemIncluded.TypeEnum.Versions, CreateItemIncluded.IdEnum._1, storageDataAtt, createItemRelationship);
CreateItem createItem = new CreateItem(new JsonApiVersionJsonapi(JsonApiVersionJsonapi.VersionEnum._0), createItemData, new List<CreateItemIncluded>() { includedVersion });
ItemsApi itemsApi = new ItemsApi();
itemsApi.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
var newItem = await itemsApi.PostItemAsync(projectId, createItem);
return newItem;
}
else
{
// create a new version
BaseAttributesExtensionObject attExtensionObj = new BaseAttributesExtensionObject(projectId.StartsWith("a.") ? "versions:autodesk.core:File" : "versions:autodesk.bim360:File", "1.0");
CreateStorageDataAttributes storageDataAtt = new CreateStorageDataAttributes(fileName, attExtensionObj);
CreateVersionDataRelationshipsItemData dataRelationshipsItemData = new CreateVersionDataRelationshipsItemData(CreateVersionDataRelationshipsItemData.TypeEnum.Items, itemId);
CreateVersionDataRelationshipsItem dataRelationshipsItem = new CreateVersionDataRelationshipsItem(dataRelationshipsItemData);
CreateItemRelationshipsStorageData itemRelationshipsStorageData = new CreateItemRelationshipsStorageData(CreateItemRelationshipsStorageData.TypeEnum.Objects, storageCreated.data.id);
CreateItemRelationshipsStorage itemRelationshipsStorage = new CreateItemRelationshipsStorage(itemRelationshipsStorageData);
CreateVersionDataRelationships dataRelationships = new CreateVersionDataRelationships(dataRelationshipsItem, itemRelationshipsStorage);
CreateVersionData versionData = new CreateVersionData(CreateVersionData.TypeEnum.Versions, storageDataAtt, dataRelationships);
CreateVersion newVersionData = new CreateVersion(new JsonApiVersionJsonapi(JsonApiVersionJsonapi.VersionEnum._0), versionData);
VersionsApi versionsApis = new VersionsApi();
versionsApis.Configuration.AccessToken = BinarytransferClient.CREDENTIAL.access_token;
dynamic newVersion = await versionsApis.PostVersionAsync(projectId, newVersionData);
return newVersion;
}
}
}
}
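Finally, uploading to a Docs folder could be invoked like the sketch below; the project ID and folder URN are placeholders shown in the shape the Data Management API expects:
// Example call from an async context (placeholder IDs)
dynamic newVersion = await upload_to_docs.UploadFile(
    @"C:\temp\my-file.rvt",
    "b.00000000-0000-0000-0000-000000000000",               // projectId (placeholder)
    "urn:adsk.wipprod:fs.folder:co.xxxxxxxxxxxxxxxxxxxxxx", // folderId (placeholder)
    "my-file.rvt");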
If you have any questions, please contact us at [email protected].
※ This article is transcribed and translated from Direct-to-S3 .NET samples, with some additions.
By Toshiaki Isezaki