chore: Refactor getBackgroundImages to improve image URL generation

This commit is contained in:
Sean Morley
2024-06-12 19:59:53 +00:00
parent 8eb9f11708
commit 71363026b2
5 changed files with 50 additions and 2 deletions

View File

@@ -1,6 +1,13 @@
import { ensureBucketExists, getObjectUrl, s3Client } from "$lib/server/s3";
import { ListObjectsV2Command } from "@aws-sdk/client-s3";
/**
* Retrieves a random background image URL from the "backgrounds" bucket.
* If the randomly selected image is ".emptyFolderPlaceholder", the function calls itself recursively to select another image.
* If no images are found in the bucket, a default image URL is returned.
*
* @returns A Promise that resolves to a string representing the URL of the background image.
*/
export const getBackgroundImages = async (): Promise<string> => {
await ensureBucketExists("backgrounds");

View File

@@ -10,6 +10,9 @@ import {
import { env } from "$env/dynamic/private";
// NOTE(review): removed a debug `console.log(env.AWS_ACCESS_KEY_ID as string);`
// that ran on module load. Printing AWS credentials (even the access key ID)
// to stdout leaks sensitive material into application logs and CI output.
/**
* Configuration object for S3 client.
*/
const s3Config: S3ClientConfig = {
region: (env.AWS_REGION as string) || "us-east-1",
credentials: {
@@ -22,6 +25,12 @@ const s3Config: S3ClientConfig = {
export const s3Client = new S3Client(s3Config);
/**
* Ensures that a bucket exists in Amazon S3. If the bucket does not exist, it creates the bucket and sets a bucket policy to allow public read access.
* If the bucket already exists, it logs a message indicating that the bucket already exists.
* @param bucketName - The name of the bucket to ensure exists.
* @throws Throws an error if there is an issue with creating the bucket or setting the bucket policy.
*/
export const ensureBucketExists = async (bucketName: string): Promise<void> => {
const headBucketCommand = new HeadBucketCommand({ Bucket: bucketName });
@@ -71,6 +80,14 @@ export const ensureBucketExists = async (bucketName: string): Promise<void> => {
}
};
/**
* Uploads an object to an S3 bucket.
* @param bucketName - The name of the S3 bucket.
* @param fileName - The name of the file to be uploaded.
* @param fileBuffer - The file content as a Buffer.
* @returns A Promise that resolves to the URL of the uploaded object.
* @throws If there is an error during the upload process.
*/
export const uploadObject = async (
bucketName: string,
fileName: string,
@@ -97,6 +114,12 @@ export const uploadObject = async (
}
};
/**
* Deletes an object from an S3 bucket.
* @param bucketName - The name of the S3 bucket.
* @param fileName - The name of the file to delete.
* @throws Throws an error if there is an issue deleting the object.
*/
export const deleteObject = async (bucketName: string, fileName: string) => {
const deleteObjectCommand = new DeleteObjectCommand({
Bucket: bucketName,
@@ -114,6 +137,12 @@ export const deleteObject = async (bucketName: string, fileName: string) => {
}
};
/**
* Returns the URL of an object in the specified bucket.
* @param bucketName - The name of the bucket.
* @param fileName - The name of the file.
* @returns The URL of the object.
*/
export const getObjectUrl = (bucketName: string, fileName: string): string => {
let objectUrl: string;
let endpoint: string = "";
@@ -123,6 +152,9 @@ export const getObjectUrl = (bucketName: string, fileName: string): string => {
endpoint = env.AWS_S3_ENDPOINT as string;
}
// This code is not as clean as it could be, but it works for what's needed. Help is welcome to clean it up!
// Currently supports Amazon S3, Google Cloud Storage, DigitalOcean Spaces, and Supabase Storage as well as self-hosted MinIO.
if (endpoint.includes("amazonaws.com")) {
// Amazon S3
objectUrl = `https://${bucketName}.s3.${env.AWS_REGION}.amazonaws.com/${fileName}`;