feat: use s3-lite-client
All checks were successful
/ run (push) Successful in 4s

This commit is contained in:
nyyu 2025-05-31 11:40:28 +02:00
parent f785885712
commit 36625ad7a9
6 changed files with 57 additions and 1178 deletions

2
.gitignore vendored
View file

@@ -1,3 +1,5 @@
/build
/cache
/node_modules
.env
.vscode

9
deno.json Normal file
View file

@@ -0,0 +1,9 @@
{
"imports": {
"@bradenmacdonald/s3-lite-client": "jsr:@bradenmacdonald/s3-lite-client@^0.9.0",
"node-html-parser": "npm:node-html-parser@^7.0.1"
},
"tasks": {
"start": "deno --allow-env --allow-read --allow-net --allow-sys src/index.ts"
}
}

1104
deno.lock generated

File diff suppressed because it is too large Load diff

View file

@@ -1,15 +0,0 @@
{
"name": "check-bios",
"version": "1.0.0",
"type": "module",
"scripts": {
"start": "deno --allow-env --allow-read --allow-net --allow-sys src/index.ts"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.820.0",
"node-html-parser": "^7.0.1"
},
"devDependencies": {
"@types/node": "^22.15.27"
}
}

View file

@@ -1,5 +1,5 @@
import { Bios, getBios } from "./bios.ts";
import { downloadFile, uploadFile } from "./s3.ts";
import { getObject, putObject } from "./s3.ts";
import { notify } from "./notify.ts";
import process from "node:process";
@@ -15,8 +1,7 @@ async function main() {
console.log(`Found ${biosList.length} BIOS versions online`);
// Get current list from S3
const file = await downloadFile(BIOS_LIST_KEY, BIOS_BUCKET);
const curList = file ? (JSON.parse(file) as Bios[]) : [];
const curList = await getObject<Bios[]>(BIOS_LIST_KEY, BIOS_BUCKET) || [];
console.log(`Current BIOS versions in storage: ${curList.length}`);
// Find new versions
@@ -28,8 +27,8 @@ async function main() {
console.log(`Found ${newBios.length} new BIOS version(s)`);
// Update S3 storage
const uploaded = await uploadFile(
JSON.stringify(biosList, null, 2),
const uploaded = await putObject(
biosList,
BIOS_LIST_KEY,
BIOS_BUCKET,
);

View file

@@ -1,11 +1,4 @@
import {
GetObjectCommand,
PutObjectCommand,
S3Client,
S3ServiceException,
} from "@aws-sdk/client-s3";
import { Readable } from "node:stream";
import { Buffer } from "node:buffer";
import { S3Client } from "@bradenmacdonald/s3-lite-client";
import process from "node:process";
// Validate required environment variables
@@ -21,59 +14,36 @@ for (const envVar of requiredEnvVars) {
}
}
const s3 = new S3Client({
const s3client = new S3Client({
region: process.env.AWS_DEFAULT_REGION!,
endpoint: process.env.AWS_ENDPOINT_URL,
forcePathStyle: true,
credentials: {
accessKeyId: process.env.AWS_ACCESS_KEY_ID!,
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY!,
},
requestChecksumCalculation: "WHEN_REQUIRED",
endPoint: process.env.AWS_ENDPOINT_URL!,
accessKey: process.env.AWS_ACCESS_KEY_ID!,
secretKey: process.env.AWS_SECRET_ACCESS_KEY!,
});
/**
* Converts a readable stream to string
* @param stream - The readable stream to convert
* @returns Promise containing the stream contents as string
*/
async function streamToString(stream: Readable): Promise<string> {
const chunks: Buffer[] = [];
for await (const chunk of stream) {
chunks.push(Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk));
}
return Buffer.concat(chunks).toString("utf-8");
}
/**
* Downloads a file from S3
* get object from S3
* @param key - The S3 object key
* @param bucket - The S3 bucket name
* @returns Promise containing the file contents as string or null if failed
* @returns Promise containing the content
* @throws Error if key or bucket is empty
*/
async function downloadFile(
async function getObject<Type>(
key: string,
bucket: string,
): Promise<string | null> {
): Promise<Type | null> {
if (!key || !bucket) {
throw new Error("Key and bucket must not be empty");
}
const command = new GetObjectCommand({ Bucket: bucket, Key: key });
try {
const response = await s3.send(command);
if (!response.Body) {
const response = await s3client.getObject(key, { bucketName: bucket });
if (!response.ok) {
throw new Error("Empty response body");
}
const body = response.Body as Readable;
return await streamToString(body);
return response.json();
} catch (error) {
if (error instanceof S3ServiceException) {
console.error(`S3 error downloading ${key}:`, error.message);
} else {
console.error(`Unexpected error downloading ${key}:`, error);
}
return null;
}
}
@@ -87,8 +57,8 @@ async function downloadFile(
* @returns Promise<boolean> indicating success or failure
* @throws Error if key or bucket is empty
*/
async function uploadFile(
content: string,
async function putObject<Type>(
content: Type,
key: string,
bucket: string,
contentType = "application/json",
@@ -97,26 +67,24 @@ async function uploadFile(
throw new Error("Key and bucket must not be empty");
}
const command = new PutObjectCommand({
Bucket: bucket,
Key: key,
Body: content,
ContentType: contentType,
});
try {
const response = await s3.send(command);
const success = response.$metadata.httpStatusCode === 200;
const response = await s3client.putObject(
key,
JSON.stringify(content, null, 2),
{
bucketName: bucket,
metadata: {
"Content-Type": contentType,
},
},
);
const success = response.versionId !== undefined;
console.log(`File uploaded: ${key}, ${success ? "Success" : "Failed"}`);
return success;
} catch (error) {
if (error instanceof S3ServiceException) {
console.error(`S3 error uploading ${key}:`, error.message);
} else {
console.error(`Unexpected error uploading ${key}:`, error);
}
return false;
}
}
export { downloadFile, uploadFile };
export { getObject, putObject };