Store An Object
This guide is experimental and may contain errors as our technology continues to evolve. If you encounter any problems, please do not hesitate to contact us on Discord and share your feedback.
This guide demonstrates how to interact with the Flashback Platform to upload and download files in your application backend:
Prerequisites
Python 3.9+
A Flashback Repository with a valid API key (READ or WRITE) – see Create a Repository.
Network access to your Bridge endpoints.
Install Required Packages
pip install boto3 google-cloud-storage azure-storage-blob
Node.js v14+
A Flashback Repository with a valid API key (READ or WRITE) – see Create a Repository.
Network access to your Bridge endpoints
Install Required Packages
npm install @aws-sdk/client-s3 @aws-sdk/s3-request-presigner \
@google-cloud/storage \
@azure/storage-blob
Go 1.18 or later
A Flashback Repository with a valid API key (READ or WRITE) — see Create a Repository.
Network access to your Flashback Bridge endpoints
Install required packages:
go get github.com/aws/aws-sdk-go-v2@latest
go get github.com/aws/aws-sdk-go-v2/config
go get cloud.google.com/go/storage
go get github.com/Azure/azure-sdk-for-go/sdk/storage/azblob
AWS S3
Configuration
Set up the S3 client with your Flashback endpoint and credentials:
# flashback_aws_config.py
"""Boto3 S3 client configured for the Flashback Bridge."""
import boto3
from botocore.client import Config

# Replace with your Flashback credentials and endpoint.
ENDPOINT = "https://s3-us-east-1-aws.flashback.tech"
# Use the READ/WRITE API information of the Repository.
API_KEY_ID = "YOUR_API_KEY_ID"
API_SECRET = "YOUR_API_SECRET"

# A dedicated session keeps these credentials isolated from any default
# AWS configuration on the machine.
session = boto3.session.Session(
    aws_access_key_id=API_KEY_ID,
    aws_secret_access_key=API_SECRET,
)

# SigV4 request signing against the Flashback Bridge endpoint.
s3_client = session.client(
    "s3",
    endpoint_url=ENDPOINT,
    config=Config(signature_version="s3v4"),
)// flashbackAwsConfig.js
import { S3Client } from "@aws-sdk/client-s3";

// Replace with your Flashback credentials and endpoint.
const ENDPOINT = "https://s3-us-east-1-aws.flashback.tech";
const API_KEY_ID = "YOUR_API_KEY_ID";
const API_SECRET = "YOUR_API_SECRET";

// Path-style addressing and static credentials, as required by the
// Flashback Bridge.
const clientConfig = {
  endpoint: ENDPOINT,
  region: "us-east-1",
  credentials: { accessKeyId: API_KEY_ID, secretAccessKey: API_SECRET },
  forcePathStyle: true,
};

export const s3Client = new S3Client(clientConfig);// flashback_aws_config.go
package flashback

import (
	"context"

	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/config"
	"github.com/aws/aws-sdk-go-v2/credentials"
	"github.com/aws/aws-sdk-go-v2/service/s3"
)

// NewS3Client returns an S3 client that talks to the Flashback Bridge
// using static API-key credentials and path-style bucket addressing.
// (Fix: the previous version imported s3types without using it, which
// is a compile error in Go.)
func NewS3Client() (*s3.Client, error) {
	// Replace with your Flashback endpoint and credentials.
	endpoint := "https://s3-us-east-1-aws.flashback.tech"
	apiKey := "YOUR_API_KEY_ID"
	apiSecret := "YOUR_API_SECRET"

	// Load the default config, overriding credentials and routing every
	// S3 request to the Flashback Bridge endpoint.
	cfg, err := config.LoadDefaultConfig(context.TODO(),
		config.WithCredentialsProvider(credentials.NewStaticCredentialsProvider(apiKey, apiSecret, "")),
		config.WithEndpointResolverWithOptions(aws.EndpointResolverWithOptionsFunc(
			func(service, region string, opts ...interface{}) (aws.Endpoint, error) {
				if service == s3.ServiceID {
					return aws.Endpoint{
						URL:           endpoint,
						SigningRegion: "us-east-1",
					}, nil
				}
				// Fall back to the SDK's default resolver for any other service.
				return aws.Endpoint{}, &aws.EndpointNotFoundError{}
			})),
	)
	if err != nil {
		return nil, err
	}
	// The Bridge expects path-style bucket addressing.
	return s3.NewFromConfig(cfg, func(o *s3.Options) {
		o.UsePathStyle = true
	}), nil
}Upload a File
Upload a local file to your specified bucket:
# aws_upload.py
"""Upload a local file to a Flashback repository bucket via the S3 API."""
from flashback_aws_config import s3_client

# S3 Bucket, GCS Bucket, or Azure Container listed in your Repository.
BUCKET_NAME = "your-bucket-name"
FILE_PATH = "path/to/local/file.txt"
# Use the file's base name as the object key.
OBJECT_NAME = FILE_PATH.rsplit("/", 1)[-1]

# upload_file streams the file and handles multipart uploads internally.
s3_client.upload_file(FILE_PATH, BUCKET_NAME, OBJECT_NAME)
print(f"Uploaded {OBJECT_NAME} to {BUCKET_NAME}")// awsUpload.js
import { PutObjectCommand } from "@aws-sdk/client-s3";
import fs from "fs";
import { s3Client } from "./flashbackAwsConfig.js";

const BUCKET_NAME = "your-bucket-name";
const FILE_PATH = "path/to/local/file.txt";
// Use the file's base name as the object key.
const OBJECT_NAME = FILE_PATH.split("/").pop();

async function main() {
  // Read the whole file into memory; fine for small objects.
  const Body = fs.readFileSync(FILE_PATH);
  await s3Client.send(
    new PutObjectCommand({ Bucket: BUCKET_NAME, Key: OBJECT_NAME, Body })
  );
  console.log(`Uploaded ${OBJECT_NAME} to ${BUCKET_NAME}`);
}

main();// aws_upload.go
package main

import (
	"context"
	"fmt"
	"os"

	"flashback"
	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/s3"
)

// main uploads a local file to a bucket in your Flashback repository.
// (Fix: the previous version used aws.String and s3.PutObjectInput
// without importing the aws and s3 packages — a compile error.)
func main() {
	client, err := flashback.NewS3Client()
	if err != nil {
		panic(err)
	}

	bucket := "your-bucket-name"
	key := "file.txt"

	file, err := os.Open("path/to/local/file.txt")
	if err != nil {
		panic(err)
	}
	defer file.Close()

	// PutObject streams the open file as the object body.
	_, err = client.PutObject(context.TODO(), &s3.PutObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
		Body:   file,
	})
	if err != nil {
		panic(err)
	}
	fmt.Printf("Uploaded %s to %s\n", key, bucket)
}Download a File
Download an object from your repository to a local path:
# aws_download.py
"""Download an object from a Flashback repository bucket via the S3 API."""
import os

from flashback_aws_config import s3_client

# S3 Bucket, GCS Bucket, or Azure Container listed in your Repository.
BUCKET_NAME = "your-bucket-name"
OBJECT_NAME = "file.txt"
DEST_PATH = "downloads/file.txt"

# Create the destination directory first; boto3 will not create it and
# download_file fails if it is missing.
os.makedirs(os.path.dirname(DEST_PATH) or ".", exist_ok=True)

s3_client.download_file(
    Bucket=BUCKET_NAME,
    Key=OBJECT_NAME,
    Filename=DEST_PATH,
)
print(f"Downloaded {OBJECT_NAME} to {DEST_PATH}")// awsDownload.js
import { GetObjectCommand } from "@aws-sdk/client-s3";
import fs from "fs";
import path from "path";
import { s3Client } from "./flashbackAwsConfig.js";

const BUCKET_NAME = "your-bucket-name";
const OBJECT_NAME = "file.txt";
const DEST_PATH = "downloads/file.txt";

(async () => {
  const { Body } = await s3Client.send(new GetObjectCommand({
    Bucket: BUCKET_NAME,
    Key: OBJECT_NAME
  }));
  // Collect the response stream into a single buffer.
  const chunks = [];
  for await (const chunk of Body) chunks.push(chunk);
  // Create the destination directory; writeFileSync will not create it.
  fs.mkdirSync(path.dirname(DEST_PATH), { recursive: true });
  fs.writeFileSync(DEST_PATH, Buffer.concat(chunks));
  console.log(`Downloaded ${OBJECT_NAME} to ${DEST_PATH}`);
})();// aws_download.go
package main

import (
	"context"
	"fmt"
	"io"
	"os"
	"path/filepath"

	"flashback"
	"github.com/aws/aws-sdk-go-v2/aws"
	"github.com/aws/aws-sdk-go-v2/service/s3"
)

// main downloads an object from a Flashback bucket to a local path.
// (Fix: the previous version used aws.String and s3.GetObjectInput
// without importing the aws and s3 packages — a compile error.)
func main() {
	client, err := flashback.NewS3Client()
	if err != nil {
		panic(err)
	}

	bucket := "your-bucket-name"
	key := "file.txt"
	destPath := "downloads/file.txt"

	resp, err := client.GetObject(context.TODO(), &s3.GetObjectInput{
		Bucket: aws.String(bucket),
		Key:    aws.String(key),
	})
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Ensure the destination directory exists before creating the file.
	if err := os.MkdirAll(filepath.Dir(destPath), 0o755); err != nil {
		panic(err)
	}
	outFile, err := os.Create(destPath)
	if err != nil {
		panic(err)
	}
	defer outFile.Close()

	// Stream the object body straight to disk.
	if _, err = io.Copy(outFile, resp.Body); err != nil {
		panic(err)
	}
	fmt.Printf("Downloaded %s to %s\n", key, destPath)
}Google Cloud Storage
Configuration
Initialize a GCS client pointing to your Flashback Bridge endpoint:
# flashback_gcs_config.py
"""GCS client configured for the Flashback Bridge."""
from google.cloud import storage
from google.oauth2 import service_account

# Replace with your Flashback credentials and endpoint.
ENDPOINT = "https://s3-us-east-1.aws.flashback.tech"
# Use the READ/WRITE API information of the Repository.
CLIENT_EMAIL = "YOUR_CLIENT_EMAIL"
PRIVATE_KEY = "YOUR_PRIVATE_KEY"

# Build service-account credentials from the inline key material.
_service_account_info = {
    "type": "service_account",
    "client_email": CLIENT_EMAIL,
    "private_key": PRIVATE_KEY,
}
credentials = service_account.Credentials.from_service_account_info(_service_account_info)

# Point the client at the Flashback Bridge instead of googleapis.com.
client = storage.Client(
    credentials=credentials,
    client_options={"api_endpoint": ENDPOINT},
)// flashbackGcsConfig.js
import { Storage } from '@google-cloud/storage';

// Replace with your Flashback credentials and endpoint.
const ENDPOINT = 'https://s3-us-east-1.aws.flashback.tech';
const CLIENT_EMAIL = 'YOUR_CLIENT_EMAIL';
const PRIVATE_KEY = 'YOUR_PRIVATE_KEY';

// Turn escaped "\n" sequences (common when the key comes from an env
// var) back into real newlines.
const normalizedKey = PRIVATE_KEY.replace(/\\n/g, '\n');
// The apiEndpoint option expects a bare host, so strip the scheme.
const apiEndpoint = ENDPOINT.replace(/^https?:\/\//, '');

const storage = new Storage({
  credentials: { client_email: CLIENT_EMAIL, private_key: normalizedKey },
  apiEndpoint,
});

export default storage;
// flashback_gcs_config.go
package flashback

import (
	"context"
	"encoding/json"

	"cloud.google.com/go/storage"
	"google.golang.org/api/option"
)

const (
	// Replace with your Flashback Bridge endpoint and service account info.
	ENDPOINT     = "https://s3-us-east-1.aws.flashback.tech"
	CLIENT_EMAIL = "YOUR_CLIENT_EMAIL"
	PRIVATE_KEY  = `-----BEGIN PRIVATE KEY-----
YOUR_PRIVATE_KEY_CONTENT
-----END PRIVATE KEY-----`
	// The GCP project ID associated with your Flashback repository.
	PROJECT_ID = "your-gcp-project-id"
)

// NewGCSClient returns a GCS client bound to the Flashback Bridge
// endpoint, together with the configured project ID.
// (Fix: the previous version built the credentials JSON with
// fmt.Sprintf, embedding the multi-line private key with raw newlines —
// which is invalid JSON. json.Marshal escapes the key correctly.)
func NewGCSClient() (*storage.Client, string, error) {
	ctx := context.Background()
	credsJSON, err := json.Marshal(map[string]string{
		"type":         "service_account",
		"client_email": CLIENT_EMAIL,
		"private_key":  PRIVATE_KEY,
	})
	if err != nil {
		return nil, "", err
	}
	client, err := storage.NewClient(ctx,
		option.WithCredentialsJSON(credsJSON),
		option.WithEndpoint(ENDPOINT),
	)
	if err != nil {
		return nil, "", err
	}
	return client, PROJECT_ID, nil
}Upload a File as a Blob
Upload a blob to the specified GCS bucket:
# gcs_upload.py
"""Upload a local file as a blob to a Flashback repository bucket."""
from flashback_gcs_config import client

# S3 Bucket, GCS Bucket, or Azure Container listed in your Repository.
BUCKET_NAME = "your-flashback-bucket-name"
FILE_PATH = "path/to/local/file.txt"
# Use the file's base name as the object name.
OBJECT_NAME = FILE_PATH.rsplit("/", 1)[-1]

# Upload the local file to the blob in a single call.
blob = client.bucket(BUCKET_NAME).blob(OBJECT_NAME)
blob.upload_from_filename(FILE_PATH)
print(f"Uploaded {OBJECT_NAME} to {BUCKET_NAME}")// gcsUpload.js
import storage from './flashbackGcsConfig.js';

const BUCKET_NAME = 'your-flashback-bucket-name';
const FILE_PATH = 'path/to/local/file.txt';
// Use the file's base name as the object name.
const OBJECT_NAME = FILE_PATH.split('/').pop();

async function main() {
  // bucket.upload reads the file and stores it under OBJECT_NAME.
  await storage.bucket(BUCKET_NAME).upload(FILE_PATH, { destination: OBJECT_NAME });
  console.log(`Uploaded ${OBJECT_NAME} to ${BUCKET_NAME}`);
}

main();// gcs_upload.go
package main

import (
	"context"
	"fmt"
	"io"
	"os"

	"flashback"
)

// main uploads a local file to a Flashback repository bucket through
// the GCS API.
// (Fix: open the source file before creating the object writer, so a
// missing file does not leave an unclosed writer behind.)
func main() {
	client, _, err := flashback.NewGCSClient()
	if err != nil {
		panic(err)
	}
	ctx := context.Background()

	bucketName := "your-flashback-bucket-name"
	filePath := "path/to/local/file.txt"
	objectName := "file.txt"

	f, err := os.Open(filePath)
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// The upload is only finalized when the writer is closed.
	writer := client.Bucket(bucketName).Object(objectName).NewWriter(ctx)
	if _, err = io.Copy(writer, f); err != nil {
		panic(err)
	}
	if err := writer.Close(); err != nil {
		panic(err)
	}
	fmt.Printf("Uploaded %s to %s\n", objectName, bucketName)
}Download a File as a Blob
Download a blob from your repository:
# gcs_download.py
"""Download a blob from a Flashback repository bucket to a local path."""
import os

from flashback_gcs_config import client

# S3 Bucket, GCS Bucket, or Azure Container listed in your Repository.
BUCKET_NAME = "your-bucket-name"
OBJECT_NAME = "file.txt"
DEST_PATH = "downloads/file.txt"

# Create the destination directory first; download_to_filename fails if
# it is missing.
os.makedirs(os.path.dirname(DEST_PATH) or ".", exist_ok=True)

bucket = client.bucket(BUCKET_NAME)
blob = bucket.blob(OBJECT_NAME)
blob.download_to_filename(DEST_PATH)
print(f"Downloaded {OBJECT_NAME} to {DEST_PATH}")// gcsDownload.js
import fs from 'fs';
import path from 'path';
import storage from './flashbackGcsConfig.js';

const BUCKET_NAME = 'your-bucket-name';
const OBJECT_NAME = 'file.txt';
const DEST_PATH = 'downloads/file.txt';

(async () => {
  // Create the destination directory; download() will not create it.
  fs.mkdirSync(path.dirname(DEST_PATH), { recursive: true });
  const bucket = storage.bucket(BUCKET_NAME);
  const blob = bucket.file(OBJECT_NAME);
  await blob.download({ destination: DEST_PATH });
  console.log(`Downloaded ${OBJECT_NAME} to ${DEST_PATH}`);
})();// gcs_download.go
package main

import (
	"context"
	"fmt"
	"io"
	"os"
	"path/filepath"

	"flashback"
)

// main downloads a blob from a Flashback repository bucket to a local
// path through the GCS API.
func main() {
	client, _, err := flashback.NewGCSClient()
	if err != nil {
		panic(err)
	}
	ctx := context.Background()

	bucketName := "your-bucket-name"
	objectName := "file.txt"
	destPath := "downloads/file.txt"

	reader, err := client.Bucket(bucketName).Object(objectName).NewReader(ctx)
	if err != nil {
		panic(err)
	}
	defer reader.Close()

	// Ensure the destination directory exists before creating the file.
	if err := os.MkdirAll(filepath.Dir(destPath), 0o755); err != nil {
		panic(err)
	}
	f, err := os.Create(destPath)
	if err != nil {
		panic(err)
	}
	defer f.Close()

	// Stream the blob contents straight to disk.
	if _, err = io.Copy(f, reader); err != nil {
		panic(err)
	}
	fmt.Printf("Downloaded %s to %s\n", objectName, destPath)
}Azure Blob Storage
Configuration
Create an Azure BlobServiceClient using your Flashback endpoint:
# flashback_azure_config.py
"""Azure BlobServiceClient configured for the Flashback Bridge."""
from azure.storage.blob import BlobServiceClient

# Replace with your Flashback endpoint and credentials.
# NOTE(review): this endpoint matches the S3 bridge URL — confirm it is
# the correct Bridge endpoint for Azure Blob traffic.
ENDPOINT = "https://s3-us-east-1.aws.flashback.tech"
# Use the READ/WRITE API information of the Repository.
CREDENTIAL = "YOUR_CREDENTIAL"

# The account URL points at the Flashback Bridge rather than
# <account>.blob.core.windows.net.
client = BlobServiceClient(account_url=ENDPOINT, credential=CREDENTIAL)// flashbackAzureConfig.js
import { BlobServiceClient, StorageSharedKeyCredential } from "@azure/storage-blob";

// Replace with your Flashback endpoint and credential.
const ENDPOINT = "https://s3-us-east-1.aws.flashback.tech";
const ACCOUNT = "YOUR_ACCOUNT_NAME";
const ACCOUNT_KEY = "YOUR_ACCOUNT_KEY";

// Shared-key authentication against the Flashback Bridge endpoint.
const credential = new StorageSharedKeyCredential(ACCOUNT, ACCOUNT_KEY);
export const blobServiceClient = new BlobServiceClient(ENDPOINT, credential);// flashback_azure_config.go
// Azure Blob Storage configuration for the Flashback Bridge.
//
// NOTE(review): azblob.ServiceClient and NewServiceClientWithSharedKey
// exist only in the pre-v1 (v0.x) azure-sdk-for-go azblob module; v1+
// replaced them with azblob.Client — confirm which SDK version is pinned.
package flashback
import (
	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob"
)
// Replace with your Flashback endpoint and credentials
// NOTE(review): ENDPOINT here duplicates the ENDPOINT constant declared
// in flashback_gcs_config.go; both files declare package flashback and
// therefore cannot build in the same package — rename one constant or
// keep the configs in separate packages.
const (
	ENDPOINT = "https://s3-us-east-1.aws.flashback.tech"
	ACCOUNT_NAME = "YOUR_ACCOUNT_NAME"
	ACCOUNT_KEY = "YOUR_ACCOUNT_KEY"
)
// NewBlobServiceClient returns an authenticated ServiceClient
// that targets the Flashback Bridge using shared-key credentials.
func NewBlobServiceClient() (*azblob.ServiceClient, error) {
	// Build the shared-key credential from the account name and key.
	cred, err := azblob.NewSharedKeyCredential(ACCOUNT_NAME, ACCOUNT_KEY)
	if err != nil {
		return nil, err
	}
	return azblob.NewServiceClientWithSharedKey(ENDPOINT, cred, nil)
}Upload a File as a Blob
Upload a local file as a blob to your bucket:
# azure_upload.py
"""Upload a local file as a blob to a Flashback repository container."""
from flashback_azure_config import client

# S3 Bucket, GCS Bucket, or Azure Container listed in your Repository.
# (Fix: placeholder normalized to "your-bucket-name" to match the other
# snippets, which previously mixed hyphens and underscores.)
CONTAINER_NAME = "your-bucket-name"
FILE_PATH = "path/to/local/file.txt"
# Use the file's base name as the blob name.
BLOB_NAME = FILE_PATH.split("/")[-1]

blob_client = client.get_blob_client(container=CONTAINER_NAME, blob=BLOB_NAME)
with open(FILE_PATH, "rb") as data:
    # NOTE: upload_blob raises ResourceExistsError if the blob already
    # exists; pass overwrite=True to replace it.
    blob_client.upload_blob(data)
print(f"Uploaded {BLOB_NAME} to {CONTAINER_NAME}")// azureUpload.js
import { blobServiceClient } from "./flashbackAzureConfig.js";

const CONTAINER_NAME = "your-bucket-name";
const FILE_PATH = "path/to/local/file.txt";
// Use the file's base name as the blob name.
const BLOB_NAME = FILE_PATH.split("/").pop();

(async () => {
  const containerClient = blobServiceClient.getContainerClient(CONTAINER_NAME);
  const blockBlobClient = containerClient.getBlockBlobClient(BLOB_NAME);
  // uploadFile streams the file from disk in chunks.
  // (Fix: removed the unused `fs` import from this self-contained snippet.)
  await blockBlobClient.uploadFile(FILE_PATH);
  console.log(`Uploaded ${BLOB_NAME} to ${CONTAINER_NAME}`);
})();// azure_upload.go
package main

import (
	"context"
	"fmt"
	"os"

	"flashback"
	"github.com/Azure/azure-sdk-for-go/sdk/storage/azblob"
)

// main uploads a local file as a block blob to a Flashback container.
// (Fixes: the azblob import was previously unused — a compile error;
// BlockBlobClient.UploadFile in the v0.x azblob module takes an open
// *os.File and an options struct, not a path and nil; and its return
// value has no RequestID field.)
func main() {
	client, err := flashback.NewBlobServiceClient()
	if err != nil {
		panic(err)
	}

	containerName := "your-bucket-name"
	blobName := "file.txt"
	localPath := "path/to/local/file.txt"

	containerClient := client.NewContainerClient(containerName)
	blobClient := containerClient.NewBlockBlobClient(blobName)

	file, err := os.Open(localPath)
	if err != nil {
		panic(err)
	}
	defer file.Close()

	if _, err := blobClient.UploadFile(context.Background(), file, azblob.HighLevelUploadToBlockBlobOption{}); err != nil {
		panic(err)
	}
	fmt.Printf("Uploaded %s to %s\n", blobName, containerName)
}Download a File as a Blob
Download a blob from your container to a local path:
# azure_download.py
"""Download a blob from a Flashback repository container to a local path."""
import os

from flashback_azure_config import client

# S3 Bucket, GCS Bucket, or Azure Container listed in your Repository.
# (Fix: placeholder normalized to "your-flashback-bucket-name" to match
# the sibling JS/Go snippets, which previously mixed hyphens/underscores.)
CONTAINER_NAME = "your-flashback-bucket-name"
BLOB_NAME = "file.txt"
DEST_PATH = "downloads/file.txt"

# Create the destination directory first; open() fails if it is missing.
os.makedirs(os.path.dirname(DEST_PATH) or ".", exist_ok=True)

blob_client = client.get_blob_client(container=CONTAINER_NAME, blob=BLOB_NAME)
with open(DEST_PATH, "wb") as file:
    data = blob_client.download_blob()
    file.write(data.readall())
print(f"Downloaded {BLOB_NAME} to {DEST_PATH}")// azureDownload.js
import { blobServiceClient } from "./flashbackAzureConfig.js";
import fs from "fs";
import path from "path";

const CONTAINER_NAME = "your-flashback-bucket-name";
const BLOB_NAME = "file.txt";
const DEST_PATH = "downloads/file.txt";

(async () => {
  const containerClient = blobServiceClient.getContainerClient(CONTAINER_NAME);
  const blockBlobClient = containerClient.getBlockBlobClient(BLOB_NAME);
  // Create the destination directory; downloadToFile will not create it.
  fs.mkdirSync(path.dirname(DEST_PATH), { recursive: true });
  await blockBlobClient.downloadToFile(DEST_PATH);
  console.log(`Downloaded ${BLOB_NAME} to ${DEST_PATH}`);
})();// azure_download.go
// azure_download.go downloads a blob from a Flashback container to a
// local file.
//
// NOTE(review): DownloadStream is a method of the v1.x azblob.Client,
// but NewBlobServiceClient (flashback_azure_config.go) returns the
// pre-v1 *azblob.ServiceClient, whose BlockBlobClient exposes
// Download(ctx, options) instead — as written this likely does not
// compile against either SDK version; confirm the pinned module.
package main
import (
	"context"
	"fmt"
	"io"
	"os"
	"flashback"
)
func main() {
	client, err := flashback.NewBlobServiceClient()
	if err != nil {
		panic(err)
	}
	containerName := "your-flashback-bucket-name"
	blobName := "file.txt"
	// NOTE: the "downloads" directory must already exist; os.Create does
	// not create parent directories.
	destPath := "downloads/file.txt"
	containerClient := client.NewContainerClient(containerName)
	blobClient := containerClient.NewBlockBlobClient(blobName)
	// Fetch the blob as a stream so large objects are not buffered in memory.
	downloadResp, err := blobClient.DownloadStream(context.Background(), nil)
	if err != nil {
		panic(err)
	}
	defer downloadResp.Body.Close()
	outFile, err := os.Create(destPath)
	if err != nil {
		panic(err)
	}
	defer outFile.Close()
	// Stream the blob contents straight to disk.
	if _, err = io.Copy(outFile, downloadResp.Body); err != nil {
		panic(err)
	}
	fmt.Printf("Downloaded %s to %s\n", blobName, destPath)
}Next Steps
Explore additional storage API operations: delete, copy, multipart uploads, etc.
Integrate these snippets into your applications and adapt them as needed.
Last updated
Was this helpful?