
S3 Management Using AWS SDK for .NET (C#)
Here’s a guide on how to interact with S3 buckets and objects using the AWS SDK for .NET (C#) in conjunction with LocalStack (a local Amazon S3 simulation).
The article includes two main sections:
- C# Code Scenarios/Snippets
- Best Practices for Using the AWS SDK for .NET to Manage S3
Prerequisites:
- LocalStack is running on your local machine (see Installing LocalStack (To Simulate AWS) Using Docker Desktop for Windows).
- The NuGet package AWSSDK.S3 has been added to your project, for example with dotnet add package AWSSDK.S3.
For this guide, we assume that LocalStack is running on http://localhost:4566 and that you have an AWS CLI profile called localstack.
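If you don’t have this set up yet, the following commands are one possible way to get there (a sketch; adjust the Docker options and profile values to your environment):
# Start LocalStack in Docker, exposing its edge port 4566
docker run --rm -d -p 4566:4566 localstack/localstack
# Create an AWS CLI profile named "localstack" (LocalStack accepts any credentials)
aws configure --profile localstack
# AWS Access Key ID: test
# AWS Secret Access Key: test
# Default region name: us-east-1
# Default output format: json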
C# Code Scenarios/Snippets
Create Amazon S3 client
using Amazon.S3;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();
    }

    static AmazonS3Client CreateAmazonS3Client()
    {
        // LocalStack accepts any credentials; "test"/"test" is a common convention.
        var awsAccessKeyId = "test";
        var awsSecretAccessKey = "test";
        var serviceURL = "http://localhost:4566";
        var region = "us-east-1";
        var enablePathStyleAccess = true;

        var s3Config = new AmazonS3Config
        {
            ServiceURL = serviceURL,
            AuthenticationRegion = region,
            ForcePathStyle = enablePathStyleAccess
        };

        return new AmazonS3Client(awsAccessKeyId, awsSecretAccessKey, s3Config);
    }
}
Create bucket
using Amazon.S3;
using Amazon.S3.Model;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        string bucketName = "test-bucket";
        await CreateBucketAsync(bucketName, s3Client);
    }

    static async Task CreateBucketAsync(string bucketName, IAmazonS3 s3Client)
    {
        var request = new PutBucketRequest
        {
            BucketName = bucketName
        };
        await s3Client.PutBucketAsync(request);
    }
}
List buckets
using Amazon.S3;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        var buckets = await ListBucketsAsync(s3Client);
        Console.WriteLine("Buckets:");
        foreach (var bucket in buckets)
        {
            Console.WriteLine($"- {bucket}");
        }
    }

    static async Task<List<string>> ListBucketsAsync(IAmazonS3 s3Client)
    {
        var response = await s3Client.ListBucketsAsync();
        return response.Buckets.Select(b => b.BucketName).ToList();
    }
}
Upload Object
using Amazon.S3;
using Amazon.S3.Model;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        string bucketName = "test-bucket";
        string objectName = "test-object.txt";
        string objectLocalPath = @"C:\repos\Testfiles\my-object.txt";

        await UploadObjectAsync(s3Client, bucketName, objectName, objectLocalPath);
        Console.WriteLine($"Object {objectName} uploaded to bucket {bucketName}.");
    }

    static async Task UploadObjectAsync(IAmazonS3 s3Client, string bucketName, string objectName, string objectLocalPath)
    {
        var request = new PutObjectRequest
        {
            BucketName = bucketName,
            Key = objectName,
            FilePath = objectLocalPath
        };
        await s3Client.PutObjectAsync(request);
    }
}
Get ETag value of an Object
using Amazon.S3;
using Amazon.S3.Model;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        string bucketName = "test-bucket";
        string objectName = "test-object.txt";

        var eTag = await GetEtagAsync(s3Client, bucketName, objectName);
        Console.WriteLine($"ETag of {objectName}: {eTag}");
    }

    static async Task<string> GetEtagAsync(IAmazonS3 s3Client, string bucketName, string objectName)
    {
        var request = new GetObjectMetadataRequest
        {
            BucketName = bucketName,
            Key = objectName
        };
        var response = await s3Client.GetObjectMetadataAsync(request);
        return response.ETag;
    }
}
Update Object Metadata
using Amazon.S3;
using Amazon.S3.Model;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        string bucketName = "test-bucket";
        string objectName = "test-object.txt";
        var metadata = new Dictionary<string, string>
        {
            { "SomeKey", "SomeValue" }
        };

        await UpdateObjectMetadataAsync(s3Client, bucketName, objectName, metadata);
        Console.WriteLine("Metadata has been updated.");
    }

    static async Task UpdateObjectMetadataAsync(IAmazonS3 s3Client, string bucketName, string objectName, Dictionary<string, string> metadata)
    {
        // S3 object metadata cannot be modified in place, so the object is copied
        // onto itself with the REPLACE metadata directive.
        var copyRequest = new CopyObjectRequest
        {
            SourceBucket = bucketName,
            SourceKey = objectName,
            DestinationBucket = bucketName,
            DestinationKey = objectName,
            MetadataDirective = S3MetadataDirective.REPLACE
        };
        foreach (var kvp in metadata)
        {
            copyRequest.Metadata.Add(kvp.Key, kvp.Value);
        }
        await s3Client.CopyObjectAsync(copyRequest);
    }
}
List Metadata of an Object
using Amazon.S3;
using Amazon.S3.Model;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        string bucketName = "test-bucket";
        string objectName = "test-object.txt";

        var metadata = await GetObjectMetadataAsync(s3Client, bucketName, objectName);
        foreach (var kvp in metadata)
        {
            await Console.Out.WriteLineAsync($"{kvp.Key} - {kvp.Value}");
        }
    }

    static async Task<Dictionary<string, string>> GetObjectMetadataAsync(IAmazonS3 s3Client, string bucketName, string objectName)
    {
        var request = new GetObjectMetadataRequest
        {
            BucketName = bucketName,
            Key = objectName
        };
        var response = await s3Client.GetObjectMetadataAsync(request);
        return response.Metadata.Keys.ToDictionary(key => key, key => response.Metadata[key]);
    }
}
Download Object
using Amazon.S3;
using Amazon.S3.Model;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        string bucketName = "test-bucket";
        string objectName = "test-object.txt";
        string downloadPath = @"C:\repos\Testfiles\the-downloaded-my-object.txt";

        await DownloadObjectAsync(s3Client, bucketName, objectName, downloadPath);
        Console.WriteLine($"{objectName} has been downloaded to {downloadPath}");
    }

    static async Task DownloadObjectAsync(IAmazonS3 s3Client, string bucketName, string objectName, string downloadPath)
    {
        var request = new GetObjectRequest
        {
            BucketName = bucketName,
            Key = objectName
        };
        using (var response = await s3Client.GetObjectAsync(request))
        {
            await response.WriteResponseStreamToFileAsync(downloadPath, false, CancellationToken.None);
        }
    }
}
Delete Object
using Amazon.S3;
using Amazon.S3.Model;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        string bucketName = "test-bucket";
        string objectName = "test-object.txt";

        await DeleteObjectAsync(s3Client, bucketName, objectName);
        Console.WriteLine($"{objectName} has been deleted");
    }

    static async Task DeleteObjectAsync(IAmazonS3 s3Client, string bucketName, string objectName)
    {
        var request = new DeleteObjectRequest
        {
            BucketName = bucketName,
            Key = objectName
        };
        await s3Client.DeleteObjectAsync(request);
    }
}
Delete Bucket
using Amazon.S3;
using Amazon.S3.Model;

static partial class Program
{
    static async Task Main(string[] args)
    {
        AmazonS3Client s3Client = CreateAmazonS3Client();

        string bucketName = "test-bucket";
        await DeleteBucketAsync(s3Client, bucketName);
        Console.WriteLine($"{bucketName} has been deleted");
    }

    static async Task DeleteBucketAsync(IAmazonS3 s3Client, string bucketName)
    {
        // The bucket must be empty (all objects deleted) before it can be deleted.
        var request = new DeleteBucketRequest
        {
            BucketName = bucketName
        };
        await s3Client.DeleteBucketAsync(request);
    }
}
Best Practices for Using the AWS SDK for .NET to Manage S3:
Use Dependency Injection for AWS Clients: Register AWS services in the dependency injection container to promote reusability and testability.
// Requires the AWSSDK.Extensions.NETCore.Setup NuGet package.
public void ConfigureServices(IServiceCollection services)
{
    services.AddAWSService<IAmazonS3>();
    services.AddDefaultAWSOptions(Configuration.GetAWSOptions());
}

public class S3Service
{
    private readonly IAmazonS3 _s3Client;

    public S3Service(IAmazonS3 s3Client)
    {
        _s3Client = s3Client;
    }
}
Use Asynchronous Programming: Always use asynchronous methods (e.g., PutObjectAsync) to prevent blocking threads.
public async Task UploadFileAsync(string bucketName, string key, string filePath, IAmazonS3 s3Client)
{
    var putRequest = new PutObjectRequest
    {
        BucketName = bucketName,
        Key = key,
        FilePath = filePath
    };
    await s3Client.PutObjectAsync(putRequest);
}
Enable Retries and Configure Resilience: Use the SDK's built-in retry mechanism, for example by setting MaxErrorRetry (and optionally RetryMode) on the client configuration.
var s3Config = new AmazonS3Config
{
    MaxErrorRetry = 3 // Retry 3 times before failing
};
var s3Client = new AmazonS3Client(accessKeyId, secretAccessKey, s3Config);
Leverage TransferUtility for Large Files: Use TransferUtility for uploads/downloads of large files to benefit from multipart uploads.
// TransferUtility is in the Amazon.S3.Transfer namespace.
var transferUtility = new TransferUtility(s3Client);
await transferUtility.UploadAsync(filePath, bucketName);
Use Presigned URLs for Temporary Access: Generate presigned URLs for temporary read/write access instead of exposing credentials.
var request = new GetPreSignedUrlRequest
{
    BucketName = bucketName,
    Key = objectKey,
    Expires = DateTime.UtcNow.AddMinutes(15)
};
string url = s3Client.GetPreSignedURL(request);
Console.WriteLine($"Presigned URL: {url}");
Handle Exceptions Gracefully: Use specific exception types for granular error handling (e.g., AmazonS3Exception).
try
{
    await s3Client.GetObjectAsync(bucketName, objectKey);
}
catch (AmazonS3Exception ex) when (ex.StatusCode == System.Net.HttpStatusCode.NotFound)
{
    Console.WriteLine("Object not found.");
}
catch (Exception ex)
{
    Console.WriteLine($"Unexpected error: {ex.Message}");
}
Use Path-Style Access with LocalStack: When working with LocalStack or other S3-compatible services, force path-style access.
var s3Config = new AmazonS3Config
{
    ServiceURL = "http://localhost:4566",
    ForcePathStyle = true
};
Encrypt Data at Rest and In Transit: Enable server-side encryption (SSE) or client-side encryption for sensitive data.
var putRequest = new PutObjectRequest
{
    BucketName = bucketName,
    Key = objectKey,
    FilePath = filePath,
    ServerSideEncryptionMethod = ServerSideEncryptionMethod.AES256
};
await s3Client.PutObjectAsync(putRequest);
Paginate for Large Object Lists: Use pagination when listing buckets or objects to avoid performance issues with large datasets.
var request = new ListObjectsV2Request
{
    BucketName = bucketName,
    MaxKeys = 100
};

ListObjectsV2Response response;
do
{
    response = await s3Client.ListObjectsV2Async(request);
    foreach (var obj in response.S3Objects)
    {
        Console.WriteLine($"Object: {obj.Key}");
    }
    request.ContinuationToken = response.NextContinuationToken;
} while (response.IsTruncated);
Use Tags for Better Management: Tag buckets and objects to enable easier tracking and cost allocation.
var tagRequest = new PutObjectTaggingRequest
{
    BucketName = bucketName,
    Key = objectKey,
    Tagging = new Tagging
    {
        TagSet = new List<Tag>
        {
            new Tag { Key = "Project", Value = "ProjectX" },
            new Tag { Key = "Environment", Value = "Production" }
        }
    }
};
await s3Client.PutObjectTaggingAsync(tagRequest);
Clean Up Resources to Avoid Costs: Delete unused buckets, objects, or incomplete multipart uploads.
var response = await s3Client.ListMultipartUploadsAsync(new ListMultipartUploadsRequest { BucketName = bucketName });
foreach (var upload in response.MultipartUploads)
{
    await s3Client.AbortMultipartUploadAsync(new AbortMultipartUploadRequest
    {
        BucketName = bucketName,
        Key = upload.Key,
        UploadId = upload.UploadId
    });
}
Validate Input Parameters: Validate bucket names, object keys, and paths to avoid runtime errors.
// Simplified check; see the S3 bucket naming rules for the full constraints.
if (!Regex.IsMatch(bucketName, @"^[a-z0-9.-]{3,63}$"))
{
    throw new ArgumentException("Invalid bucket name.");
}
Use Environment Variables for Credentials: Avoid hardcoding credentials; use environment variables, IAM roles, or AWS Secrets Manager.
export AWS_ACCESS_KEY_ID=yourAccessKey
export AWS_SECRET_ACCESS_KEY=yourSecretKey
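When credentials are supplied this way (or through an IAM role), the client can be constructed without passing keys at all, since the SDK's default credential chain resolves them. A minimal sketch, reusing the LocalStack endpoint from earlier:
// No explicit keys: credentials come from the environment, a profile, or an IAM role.
var s3Client = new AmazonS3Client(new AmazonS3Config
{
    ServiceURL = "http://localhost:4566",
    ForcePathStyle = true
});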
Enable Bucket Versioning: Enable versioning to maintain object history and prevent accidental deletions.
await s3Client.PutBucketVersioningAsync(new PutBucketVersioningRequest
{
    BucketName = bucketName,
    VersioningConfig = new S3BucketVersioningConfig { Status = VersionStatus.Enabled }
});
Use Lifecycle Policies: Use lifecycle policies for automated transitions or expirations of objects.
var lifecycleRequest = new PutLifecycleConfigurationRequest
{
    BucketName = bucketName,
    Configuration = new LifecycleConfiguration
    {
        Rules = new List<LifecycleRule>
        {
            new LifecycleRule
            {
                Id = "ArchiveOldFiles",
                Status = LifecycleRuleStatus.Enabled,
                Prefix = "logs/",
                Transitions = new List<LifecycleTransition>
                {
                    new LifecycleTransition
                    {
                        Days = 30,
                        StorageClass = S3StorageClass.Glacier
                    }
                }
            }
        }
    }
};
await s3Client.PutLifecycleConfigurationAsync(lifecycleRequest);
Finally, Monitor and Log S3 Operations: Enable AWS CloudTrail and S3 server access logs to track activity. CloudTrail is configured outside the S3 client (via the AWS Management Console or CLI), while server access logging can be enabled from the SDK with PutBucketLoggingAsync.
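A minimal sketch of turning on server access logging from the SDK, assuming a separate target bucket (here hypothetically named access-log-bucket) already exists:
await s3Client.PutBucketLoggingAsync(new PutBucketLoggingRequest
{
    BucketName = bucketName,
    LoggingConfig = new S3BucketLoggingConfig
    {
        TargetBucketName = "access-log-bucket", // hypothetical; must already exist
        TargetPrefix = "access-logs/"
    }
});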
Done! 😊
💡 Please let me know your opinion in the comments.
Thank you for your time 😊