Add automated Cloudflare R2 bucket provisioning

This commit is contained in:
thePR0M3TH3AN
2025-09-26 20:46:18 -04:00
parent f121a17cd9
commit 9b4dd2a6ff
5 changed files with 587 additions and 496 deletions

131
js/storage/r2-mgmt.js Normal file
View File

@@ -0,0 +1,131 @@
// js/storage/r2-mgmt.js
/**
 * Derive a valid R2 bucket name from a user's npub.
 *
 * Lowercases the input, strips every character outside [a-z0-9-], trims
 * leading/trailing hyphens, then appends a base-36 timestamp suffix so
 * repeated provisioning attempts get distinct names. The result is clamped
 * to 63 characters (S3/R2 bucket-name limit) and guaranteed to be at least
 * 3 characters long.
 *
 * @param {string} npub - Nostr public key (or any identifier); falsy falls back to "user".
 * @returns {string} A bucket name of the form `bv-<base>-<suffix>`.
 */
export function sanitizeBucketName(npub) {
  const suffix = Date.now().toString(36);
  let base = (npub || "user").toLowerCase();
  base = base.replace(/[^a-z0-9-]/g, "");
  // Trim hyphens from both ends so the name never starts/ends with "-".
  base = base.replace(/^-+/, "").replace(/-+$/, "");
  if (base === "") {
    base = "u";
  }
  const candidate = `bv-${base}-${suffix}`.slice(0, 63);
  // Bucket names must be >= 3 chars; the fallback always satisfies that.
  return candidate.length >= 3 ? candidate : `bv-u-${suffix}`;
}
/**
 * Normalize an arbitrary thrown value into an Error instance.
 *
 * Real Error objects pass through untouched; anything else (strings, plain
 * objects, etc.) is wrapped in a new Error whose `original` property keeps
 * the raw value for debugging.
 *
 * @param {unknown} error - Whatever was thrown.
 * @param {string} [fallbackMessage] - Message for the wrapper Error.
 * @returns {Error}
 */
function buildError(error, fallbackMessage) {
  if (error instanceof Error) {
    return error;
  }
  const wrapped = new Error(fallbackMessage || "Cloudflare request failed");
  wrapped.original = error;
  return wrapped;
}
// --- Cloudflare API helpers (Bearer token) ---
/**
 * Thin wrapper around the Cloudflare v4 REST API using Bearer-token auth.
 *
 * Sends JSON, parses the JSON response, and throws an Error (carrying
 * `status` and `response`) whenever the HTTP status is not ok or the
 * Cloudflare envelope reports `success: false`.
 *
 * @param {string} path - Path appended to `https://api.cloudflare.com/client/v4`.
 * @param {object} opts
 * @param {string} opts.token - Cloudflare API token (required).
 * @param {string} [opts.method="GET"]
 * @param {object} [opts.body] - JSON-serialized when present.
 * @param {object} [opts.headers] - Extra headers merged over the defaults.
 * @returns {Promise<object>} Parsed response envelope ({} for empty bodies).
 * @throws {Error} On missing token, HTTP/API failure, or unparsable success body.
 */
async function cfFetch(path, { token, method = "GET", body, headers = {} }) {
  if (!token) {
    throw new Error("Cloudflare API token is required");
  }
  const response = await fetch(`https://api.cloudflare.com/client/v4${path}`, {
    method,
    headers: {
      Authorization: `Bearer ${token}`,
      "Content-Type": "application/json",
      ...headers,
    },
    body: body ? JSON.stringify(body) : undefined,
  });
  const raw = await response.text();
  let payload = {};
  if (raw) {
    try {
      payload = JSON.parse(raw);
    } catch (parseErr) {
      // Non-JSON body: surface the raw text for HTTP failures, otherwise
      // propagate the parse error (a "successful" response should be JSON).
      if (response.ok) {
        throw parseErr;
      }
      const httpErr = new Error(raw || `Cloudflare API error ${response.status}`);
      httpErr.status = response.status;
      throw httpErr;
    }
  }
  if (!response.ok || payload?.success === false) {
    const message =
      payload?.errors?.[0]?.message ||
      payload?.message ||
      raw ||
      `Cloudflare API error ${response.status}`;
    const apiErr = new Error(message);
    apiErr.status = response.status;
    apiErr.response = payload;
    throw apiErr;
  }
  return payload;
}
// 1) Create bucket (idempotent-ish: treat 409 as success)
/**
 * Create an R2 bucket, tolerating the case where it already exists.
 *
 * Cloudflare returns 409 (or an "already exists" message) for duplicate
 * bucket names; that outcome is treated as success so provisioning is
 * effectively idempotent. Any other failure is rethrown.
 *
 * @param {object} opts
 * @param {string} opts.accountId - Cloudflare account id.
 * @param {string} opts.bucket - Bucket name to create.
 * @param {string} opts.token - Cloudflare API token.
 * @returns {Promise<void>}
 */
export async function ensureBucket({ accountId, bucket, token }) {
  try {
    await cfFetch(`/accounts/${accountId}/r2/buckets`, {
      token,
      method: "POST",
      body: { name: bucket },
    });
  } catch (cause) {
    const normalized = buildError(cause);
    const alreadyExists =
      normalized.status === 409 ||
      /already exists/i.test(normalized.message || "");
    if (!alreadyExists) {
      throw normalized;
    }
  }
}
// 2) Set CORS so browser PUT/GETs work from your app origins
/**
 * Install a CORS policy on the bucket so browsers can PUT/GET directly.
 *
 * Falsy entries are dropped from `origins`; if nothing remains, the call is
 * a no-op (no CORS rule is written). The single rule allows GET/HEAD/PUT/POST
 * from the given origins with all request headers, exposes the headers needed
 * for multipart/range handling, and caches preflights for one hour.
 *
 * @param {object} opts
 * @param {string} opts.accountId - Cloudflare account id.
 * @param {string} opts.bucket - Target bucket name.
 * @param {string} opts.token - Cloudflare API token.
 * @param {string[]} [opts.origins] - Allowed browser origins.
 * @returns {Promise<void>}
 */
export async function putCors({ accountId, bucket, token, origins }) {
  const allowedOrigins = (origins || []).filter(Boolean);
  if (allowedOrigins.length === 0) {
    return;
  }
  const corsRule = {
    AllowedOrigins: allowedOrigins,
    AllowedMethods: ["GET", "HEAD", "PUT", "POST"],
    AllowedHeaders: ["*"],
    ExposeHeaders: ["ETag", "Content-Length", "Content-Range"],
    MaxAgeSeconds: 3600,
  };
  await cfFetch(`/accounts/${accountId}/r2/buckets/${bucket}/cors`, {
    token,
    method: "PUT",
    body: { rules: [corsRule] },
  });
}
// 3) Attach custom domain to bucket (auto-provisions on Cloudflare)
/**
 * Attach a custom domain to the bucket; Cloudflare auto-provisions DNS/TLS.
 *
 * @param {object} opts
 * @param {string} opts.accountId - Cloudflare account id.
 * @param {string} opts.bucket - Bucket to expose.
 * @param {string} opts.token - Cloudflare API token.
 * @param {string} opts.zoneId - Zone that owns the domain.
 * @param {string} opts.domain - Hostname to attach.
 * @returns {Promise<string>} The public https:// URL for the bucket.
 */
export async function attachCustomDomain({
  accountId,
  bucket,
  token,
  zoneId,
  domain,
}) {
  const response = await cfFetch(
    `/accounts/${accountId}/r2/buckets/${bucket}/domains/custom`,
    {
      token,
      method: "POST",
      body: { domain, zoneId, enabled: true, minTLS: "1.2" },
    }
  );
  // Prefer the domain Cloudflare echoes back; fall back to what we sent.
  const attached = response.result?.domain || domain;
  return `https://${attached}`;
}
// 4) Or enable r2.dev managed domain as fallback
/**
 * Enable the managed r2.dev domain for a bucket as a fallback public URL.
 *
 * @param {object} opts
 * @param {string} opts.accountId - Cloudflare account id.
 * @param {string} opts.bucket - Bucket to expose.
 * @param {string} opts.token - Cloudflare API token.
 * @returns {Promise<string>} The public https://<bucket>.r2.dev-style URL.
 * @throws {Error} If the API response does not include a managed domain
 *   (the original code would have returned the literal "https://undefined").
 */
export async function enableManagedDomain({ accountId, bucket, token }) {
  const { result } = await cfFetch(
    `/accounts/${accountId}/r2/buckets/${bucket}/domains/managed`,
    { token, method: "PUT", body: { enabled: true } }
  );
  const domain = result?.domain;
  if (!domain) {
    // Fail loudly instead of handing callers an unusable URL.
    throw new Error("Cloudflare did not return a managed r2.dev domain");
  }
  return `https://${domain}`;
}

138
js/storage/r2-s3.js Normal file
View File

@@ -0,0 +1,138 @@
// js/storage/r2-s3.js
import {
S3Client,
CreateMultipartUploadCommand,
UploadPartCommand,
CompleteMultipartUploadCommand,
AbortMultipartUploadCommand,
} from "https://esm.sh/@aws-sdk/client-s3@3.614.0?target=es2022&bundle";
/**
 * Build an S3-compatible client pointed at Cloudflare R2.
 *
 * Uses path-style addressing against the account-scoped R2 endpoint with
 * the fixed "auto" region that R2 expects.
 *
 * @param {object} creds
 * @param {string} creds.accountId - Cloudflare account id (forms the endpoint host).
 * @param {string} creds.accessKeyId - R2 access key id.
 * @param {string} creds.secretAccessKey - R2 secret access key.
 * @returns {S3Client}
 * @throws {Error} When any credential component is missing.
 */
export function makeR2Client({ accountId, accessKeyId, secretAccessKey }) {
  const haveAllCreds = Boolean(accountId && accessKeyId && secretAccessKey);
  if (!haveAllCreds) {
    throw new Error("Missing Cloudflare R2 credentials");
  }
  const endpoint = `https://${accountId}.r2.cloudflarestorage.com`;
  return new S3Client({
    region: "auto",
    endpoint,
    credentials: { accessKeyId, secretAccessKey },
    forcePathStyle: true,
  });
}
/**
 * Upload a File/Blob to R2 via S3 multipart upload with bounded concurrency.
 *
 * Splits the file into 8 MiB parts, uploads them with up to `concurrency`
 * parallel workers, then completes the multipart upload. On any failure the
 * upload is aborted (best effort) and the first error is rethrown.
 *
 * Bug fixed vs. the original: `uploadPart` was declared `async`, so it always
 * returned a Promise and the worker guard `if (!task) break;` could never
 * fire. Idle workers busy-spun on `parts.length < totalParts`, and if a part
 * failed that condition never became false, hanging the function forever.
 * The dispatcher is now synchronous (returns null once the file is fully
 * claimed) and workers stop as soon as an error is recorded.
 *
 * @param {object} opts
 * @param {S3Client} opts.s3 - Client from makeR2Client().
 * @param {string} opts.bucket - Target bucket.
 * @param {string} opts.key - Object key.
 * @param {Blob|File} opts.file - Data to upload (must support .slice/.size).
 * @param {string} [opts.contentType] - Falls back to file.type, then "video/mp4".
 * @param {(fraction: number) => void} [opts.onProgress] - Called with 0..1 as parts finish.
 * @param {number} [opts.concurrency=4] - Max parallel part uploads (min 1).
 * @returns {Promise<void>}
 * @throws {Error} On invalid arguments or any upload failure.
 */
export async function multipartUpload({
  s3,
  bucket,
  key,
  file,
  contentType,
  onProgress,
  concurrency = 4,
}) {
  if (!s3) {
    throw new Error("S3 client is required");
  }
  if (!bucket) {
    throw new Error("Bucket is required");
  }
  if (!key) {
    throw new Error("Object key is required");
  }
  if (!file) {
    throw new Error("File is required");
  }
  const { UploadId } = await s3.send(
    new CreateMultipartUploadCommand({
      Bucket: bucket,
      Key: key,
      ContentType: contentType || file.type || "video/mp4",
      CacheControl: "public, max-age=31536000, immutable",
    })
  );
  if (!UploadId) {
    throw new Error("Failed to start multipart upload");
  }
  const PART = 8 * 1024 * 1024;
  const total = file.size;
  const parts = [];
  const errors = [];
  let offset = 0;
  let partNumber = 1;
  // Synchronous dispatcher: claims the next byte range and returns a Promise
  // for its upload, or null when the whole file has been claimed. Returning
  // null synchronously lets idle workers exit instead of spinning.
  const nextPartUpload = () => {
    const start = offset;
    if (start >= total) {
      return null;
    }
    const end = Math.min(start + PART, total);
    offset = end;
    const currentPart = partNumber++;
    return s3
      .send(
        new UploadPartCommand({
          Bucket: bucket,
          Key: key,
          UploadId,
          PartNumber: currentPart,
          Body: file.slice(start, end),
        })
      )
      .then(({ ETag }) => {
        parts.push({ ETag, PartNumber: currentPart });
        if (typeof onProgress === "function") {
          // Parts may complete out of order, so this is approximate progress.
          onProgress(end / total);
        }
      });
  };
  const workers = Array.from({ length: Math.max(1, concurrency) }, () =>
    (async () => {
      // Stop pulling new parts as soon as any worker has recorded an error.
      while (errors.length === 0) {
        const task = nextPartUpload();
        if (!task) {
          break;
        }
        try {
          await task;
        } catch (error) {
          errors.push(error);
        }
      }
    })()
  );
  try {
    await Promise.all(workers);
    if (errors.length > 0) {
      throw errors[0];
    }
    // CompleteMultipartUpload requires parts listed in ascending order.
    parts.sort((a, b) => a.PartNumber - b.PartNumber);
    await s3.send(
      new CompleteMultipartUploadCommand({
        Bucket: bucket,
        Key: key,
        UploadId,
        MultipartUpload: { Parts: parts },
      })
    );
  } catch (error) {
    // Best-effort cleanup so failed uploads don't leave orphaned parts billed.
    await s3
      .send(
        new AbortMultipartUploadCommand({
          Bucket: bucket,
          Key: key,
          UploadId,
        })
      )
      .catch(() => {});
    throw error;
  }
}