Store An Object
Prerequisites
pip install boto3 google-cloud-storage azure-storage-blob

npm install @aws-sdk/client-s3 @aws-sdk/s3-request-presigner \
  @google-cloud/storage \
  @azure/storage-blob

go get github.com/aws/aws-sdk-go-v2@latest
go get github.com/aws/aws-sdk-go-v2/config
go get cloud.google.com/go/storage
go get github.com/Azure/azure-sdk-for-go/sdk/storage/azblob

AWS S3
1
Configuration
# flashback_aws_config.py
import boto3
from botocore.client import Config
# Replace with your Flashback credentials and endpoint
ENDPOINT = "https://s3-us-east-1-aws.flashback.tech"
# Consider READ/WRITE API Information of the Repository
API_KEY_ID = "YOUR_API_KEY_ID"
API_SECRET = "YOUR_API_SECRET"
session = boto3.session.Session(
aws_access_key_id=API_KEY_ID,
aws_secret_access_key=API_SECRET
)
s3_client = session.client(
service_name="s3",
endpoint_url=ENDPOINT,
config=Config(signature_version="s3v4")
)// flashbackAwsConfig.js
// Shared S3 client for the Flashback S3-compatible endpoint.
import { S3Client } from "@aws-sdk/client-s3";

// Replace with your Flashback credentials and endpoint
const ENDPOINT = "https://s3-us-east-1-aws.flashback.tech";
const API_KEY_ID = "YOUR_API_KEY_ID";
const API_SECRET = "YOUR_API_SECRET";

export const s3Client = new S3Client({
  endpoint: ENDPOINT,
  region: "us-east-1",
  credentials: {
    accessKeyId: API_KEY_ID,
    secretAccessKey: API_SECRET
  },
  // Path-style URLs (host/bucket/key) instead of bucket-subdomain addressing.
  forcePathStyle: true
});
2
Upload a File
# aws_upload.py
from flashback_aws_config import s3_client
#S3 Bucket, GCS Bucket, or Azure Container listed in your Repository
BUCKET_NAME = "your-bucket-name"
FILE_PATH = "path/to/local/file.txt"
OBJECT_NAME = FILE_PATH.split("/")[-1]
s3_client.upload_file(
Filename=FILE_PATH,
Bucket=BUCKET_NAME,
Key=OBJECT_NAME
)
print(f"Uploaded {OBJECT_NAME} to {BUCKET_NAME}")// awsUpload.js
import { PutObjectCommand } from "@aws-sdk/client-s3";
import fs from "fs";
import { s3Client } from "./flashbackAwsConfig.js";

const BUCKET_NAME = "your-bucket-name";
const FILE_PATH = "path/to/local/file.txt";
const OBJECT_NAME = FILE_PATH.split("/").pop();
// Whole file is read into memory; for very large files prefer
// fs.createReadStream(FILE_PATH) as the Body instead.
const BODY = fs.readFileSync(FILE_PATH);

(async () => {
  await s3Client.send(new PutObjectCommand({
    Bucket: BUCKET_NAME,
    Key: OBJECT_NAME,
    // FIX: the original used the shorthand `Body`, which referenced an
    // undefined binding (the constant is named BODY) — a ReferenceError.
    Body: BODY
  }));
  console.log(`Uploaded ${OBJECT_NAME} to ${BUCKET_NAME}`);
})();
3
Download a File
# aws_download.py
from flashback_aws_config import s3_client
#S3 Bucket, GCS Bucket, or Azure Container listed in your Repository
BUCKET_NAME = "your-bucket-name"
OBJECT_NAME = "file.txt"
DEST_PATH = "downloads/file.txt"
s3_client.download_file(
Bucket=BUCKET_NAME,
Key=OBJECT_NAME,
Filename=DEST_PATH
)
print(f"Downloaded {OBJECT_NAME} to {DEST_PATH}")// awsDownload.js
import { GetObjectCommand } from "@aws-sdk/client-s3";
import fs from "fs";
import { s3Client } from "./flashbackAwsConfig.js";
const BUCKET_NAME = "your-bucket-name";
const OBJECT_NAME = "file.txt";
const DEST_PATH = "downloads/file.txt";
(async () => {
const { Body } = await s3Client.send(new GetObjectCommand({
Bucket: BUCKET_NAME,
Key: OBJECT_NAME
}));
const chunks = [];
for await (let chunk of Body) chunks.push(chunk);
fs.writeFileSync(DEST_PATH, Buffer.concat(chunks));
console.log(`Downloaded ${OBJECT_NAME} to ${DEST_PATH}`);
})();Google Cloud Storage
Azure Blob Storage
Next Steps
Last updated
Was this helpful?