// download-thumbnails script (100 lines, 2.9 KiB, TypeScript)
import { S3Client, GetObjectCommand } from "@aws-sdk/client-s3";
|
|
import fs from "fs";
|
|
import path from "path";
|
|
import dotenv from "dotenv";
|
|
import { fileURLToPath } from "url";
|
|
|
|
dotenv.config();
|
|
|
|
const __filename = fileURLToPath(import.meta.url);
|
|
const __dirname = path.dirname(__filename);
|
|
|
|
const client = new S3Client({
|
|
region: process.env.S3_REGION || "fsn1",
|
|
endpoint: process.env.S3_ENDPOINT,
|
|
credentials: {
|
|
accessKeyId: process.env.S3_ACCESS_KEY || "",
|
|
secretAccessKey: process.env.S3_SECRET_KEY || "",
|
|
},
|
|
forcePathStyle: true,
|
|
});
|
|
|
|
async function downloadFile(key: string, localPath: string) {
|
|
try {
|
|
const bucket = process.env.S3_BUCKET || "mintel";
|
|
const command = new GetObjectCommand({
|
|
Bucket: bucket,
|
|
Key: key,
|
|
});
|
|
const response = await client.send(command);
|
|
|
|
if (response.Body) {
|
|
const dir = path.dirname(localPath);
|
|
if (!fs.existsSync(dir)) {
|
|
fs.mkdirSync(dir, { recursive: true });
|
|
}
|
|
|
|
const stream = fs.createWriteStream(localPath);
|
|
const reader = response.Body as any;
|
|
|
|
// Node.js stream handling
|
|
if (typeof reader.pipe === "function") {
|
|
reader.pipe(stream);
|
|
} else {
|
|
// Alternative for web streams if necessary, but in Node it should have pipe
|
|
const arr = await response.Body.transformToByteArray();
|
|
fs.writeFileSync(localPath, arr);
|
|
}
|
|
|
|
return new Promise((resolve, reject) => {
|
|
stream.on("finish", resolve);
|
|
stream.on("error", reject);
|
|
});
|
|
}
|
|
} catch (err) {
|
|
console.error(`Failed to download ${key}:`, err);
|
|
}
|
|
}
|
|
|
|
function parseMatter(content: string) {
|
|
const match = content.match(/^---\n([\s\S]*?)\n---\n([\s\S]*)$/);
|
|
if (!match) return { data: {}, content };
|
|
const data: Record<string, any> = {};
|
|
match[1].split("\n").forEach((line) => {
|
|
const [key, ...rest] = line.split(":");
|
|
if (key && rest.length) {
|
|
const field = key.trim();
|
|
let val = rest.join(":").trim();
|
|
data[field] = val.replace(/^["']|["']$/g, "");
|
|
}
|
|
});
|
|
return { data, content: match[2].trim() };
|
|
}
|
|
|
|
async function run() {
|
|
const webDir = path.resolve(__dirname, "..");
|
|
const contentDir = path.join(webDir, "content", "blog");
|
|
const publicDir = path.join(webDir, "public");
|
|
const prefix = `${process.env.S3_PREFIX || "mintel-me"}/media/`;
|
|
|
|
const files = fs.readdirSync(contentDir).filter((f) => f.endsWith(".mdx"));
|
|
|
|
for (const file of files) {
|
|
const content = fs.readFileSync(path.join(contentDir, file), "utf-8");
|
|
const { data } = parseMatter(content);
|
|
|
|
if (data.thumbnail) {
|
|
const fileName = path.basename(data.thumbnail);
|
|
const s3Key = `${prefix}${fileName}`;
|
|
const localPath = path.join(publicDir, data.thumbnail.replace(/^\//, ""));
|
|
|
|
console.log(`Downloading ${s3Key} to ${localPath}...`);
|
|
await downloadFile(s3Key, localPath);
|
|
}
|
|
}
|
|
|
|
console.log("Downloads complete.");
|
|
}
|
|
|
|
run();
|