All checks were successful
Build & Deploy / 🔍 Prepare (push) Successful in 6s
Build & Deploy / 🧪 QA (push) Successful in 2m32s
Build & Deploy / 🏗️ Build (push) Successful in 5m3s
Build & Deploy / 🚀 Deploy (push) Successful in 16s
Build & Deploy / 🧪 Post-Deploy Verification (push) Successful in 5m10s
Build & Deploy / 🔔 Notify (push) Successful in 1s
chore(release): bump version to 2.2.9
324 lines
10 KiB
TypeScript
324 lines
10 KiB
TypeScript
import { getPayload } from 'payload';
|
|
import configPromise from '@payload-config';
|
|
import { config } from '@/lib/config';
|
|
|
|
export function extractExcerpt(content: string): string {
|
|
if (!content) return '';
|
|
// Remove frontmatter if present (though matter() usually strips it out)
|
|
let text = content.replace(/^---[\s\S]*?---/, '');
|
|
// Remove MDX component imports and usages
|
|
text = text.replace(/<[^>]+>/g, '');
|
|
text = text.replace(/^[ \t]*import\s+.*$/gm, '');
|
|
text = text.replace(/^[ \t]*export\s+.*$/gm, '');
|
|
// Remove markdown headings
|
|
text = text.replace(/^#+.*$/gm, '');
|
|
// Extract first paragraph or combined lines
|
|
const paragraphs = text
|
|
.split(/\n\s*\n/)
|
|
.filter((p) => p.trim() && !p.trim().startsWith('---') && !p.trim().startsWith('#'));
|
|
if (paragraphs.length === 0) return '';
|
|
|
|
const excerpt = paragraphs[0]
|
|
.replace(/[*_`]/g, '') // remove markdown bold/italic/code
|
|
.replace(/\[(.*?)\]\(.*?\)/g, '$1') // replace links with their text
|
|
.replace(/\s+/g, ' ')
|
|
.trim();
|
|
|
|
return excerpt.length > 200 ? excerpt.slice(0, 197) + '...' : excerpt;
|
|
}
|
|
|
|
// Frontmatter-style metadata for a post, mapped from a Payload CMS document.
export interface PostFrontmatter {
  // Post title (localized by Payload).
  title: string;
  // Publication date string as stored by Payload; used for sorting and
  // future-date visibility checks.
  date: string;
  // Short teaser text; mapped to '' when the document has none.
  excerpt?: string;
  // Resolved featured-image URL; null when the media relation is not
  // populated as an object.
  featuredImage?: string | null;
  // Image focal point; mapped to 50 (center) when no image object exists.
  focalX?: number;
  focalY?: number;
  // Category label; mapped to '' when unset.
  category?: string;
  // Derived from Payload's _status === 'published'; `public === false`
  // hides the post in production (see isPostVisible).
  public?: boolean;
}
|
|
|
|
// A post as consumed by the frontend: slug, metadata, and rich-text content.
export interface PostData {
  slug: string;
  frontmatter: PostFrontmatter;
  // Lexical SerializedEditorState from Payload's rich-text editor; kept as
  // `any` because the serialized shape is not typed in this module.
  content: any;
}
|
|
|
|
export function isPostVisible(post: { frontmatter: { date: string; public?: boolean } }) {
|
|
// If explicitly marked as not public, hide in production
|
|
if (post.frontmatter.public === false && config.isProduction) {
|
|
return false;
|
|
}
|
|
|
|
const postDate = new Date(post.frontmatter.date);
|
|
const now = new Date();
|
|
return !(postDate > now && config.isProduction);
|
|
}
|
|
|
|
/**
 * Loads a single post by slug in the given locale.
 *
 * Resolution order:
 *  1. Query the post by slug in the requested locale.
 *  2. If nothing matches, query across all locales (handles localized slugs
 *     when the user switches language on a post page), then re-fetch the
 *     match by id strictly in the requested locale so localized fields —
 *     including the translated slug — come back correctly.
 *
 * Returns null when no post matches or when the query throws.
 */
export async function getPostBySlug(slug: string, locale: string): Promise<PostData | null> {
  try {
    const payload = await getPayload({ config: configPromise });

    // First try: Find in the requested locale
    let { docs } = await payload.find({
      collection: 'posts',
      where: {
        slug: { equals: slug },
        // Restrict to published documents unless draft preview is enabled.
        ...(!config.showDrafts ? { _status: { equals: 'published' } } : {}),
      },
      locale: locale as any,
      draft: config.showDrafts,
      limit: 1,
    });

    // Fallback: If not found, try searching across all locales.
    // This happens when a user uses the static language switcher
    // e.g. switching from /en/blog/en-slug to /de/blog/en-slug.
    if (!docs || docs.length === 0) {
      const { docs: crossLocaleDocs } = await payload.find({
        collection: 'posts',
        where: {
          slug: { equals: slug },
          ...(!config.showDrafts ? { _status: { equals: 'published' } } : {}),
        },
        locale: 'all',
        draft: config.showDrafts,
        limit: 1,
      });

      if (crossLocaleDocs && crossLocaleDocs.length > 0) {
        // Fetch the found document again, but strictly in the requested locale
        // so we get the correctly translated fields (like the localized slug)
        const { docs: correctLocaleDocs } = await payload.find({
          collection: 'posts',
          where: {
            id: { equals: crossLocaleDocs[0].id },
          },
          locale: locale as any,
          draft: config.showDrafts,
          limit: 1,
        });

        docs = correctLocaleDocs;
      }
    }

    if (!docs || docs.length === 0) return null;

    const doc = docs[0];

    return {
      slug: doc.slug,
      frontmatter: {
        title: doc.title,
        date: doc.date,
        excerpt: doc.excerpt || '',
        category: doc.category || '',
        // The media relation resolves to an object only when populated;
        // prefer the full URL, fall back to the 'card' size.
        featuredImage:
          typeof doc.featuredImage === 'object' && doc.featuredImage !== null
            ? doc.featuredImage.url || doc.featuredImage.sizes?.card?.url
            : null,
        // Focal point defaults to center (50/50) when no image object exists.
        focalX:
          typeof doc.featuredImage === 'object' && doc.featuredImage !== null
            ? doc.featuredImage.focalX
            : 50,
        focalY:
          typeof doc.featuredImage === 'object' && doc.featuredImage !== null
            ? doc.featuredImage.focalY
            : 50,
        public: doc._status === 'published',
      } as PostFrontmatter,
      content: doc.content as any, // Native Lexical Editor State
    };
  } catch (error) {
    console.error(`[Payload] getPostBySlug failed for ${slug}:`, error);
    return null;
  }
}
|
|
|
|
export async function getAllPosts(locale: string): Promise<PostData[]> {
|
|
try {
|
|
const payload = await getPayload({ config: configPromise });
|
|
const { docs } = await payload.find({
|
|
collection: 'posts',
|
|
where: {
|
|
...(!config.showDrafts ? { _status: { equals: 'published' } } : {}),
|
|
},
|
|
locale: locale as any,
|
|
sort: '-date',
|
|
draft: config.showDrafts,
|
|
limit: 100,
|
|
});
|
|
|
|
console.log(`[Payload] getAllPosts for ${locale}: Found ${docs.length} docs`);
|
|
|
|
const posts = docs.map((doc) => {
|
|
return {
|
|
slug: doc.slug,
|
|
frontmatter: {
|
|
title: doc.title,
|
|
date: doc.date,
|
|
excerpt: doc.excerpt || '',
|
|
category: doc.category || '',
|
|
featuredImage:
|
|
typeof doc.featuredImage === 'object' && doc.featuredImage !== null
|
|
? doc.featuredImage.url || doc.featuredImage.sizes?.card?.url
|
|
: null,
|
|
focalX:
|
|
typeof doc.featuredImage === 'object' && doc.featuredImage !== null
|
|
? doc.featuredImage.focalX
|
|
: 50,
|
|
focalY:
|
|
typeof doc.featuredImage === 'object' && doc.featuredImage !== null
|
|
? doc.featuredImage.focalY
|
|
: 50,
|
|
} as PostFrontmatter,
|
|
// Pass the Lexical content object rather than raw markdown string
|
|
content: doc.content as any,
|
|
};
|
|
});
|
|
|
|
// Integrity check: only show posts with a featured image in listings/sitemap
|
|
return posts.filter((p) => !!p.frontmatter.featuredImage);
|
|
} catch (error) {
|
|
console.error(`[Payload] getAllPosts failed for ${locale}:`, error);
|
|
return [];
|
|
}
|
|
}
|
|
|
|
export async function getAllPostsMetadata(locale: string): Promise<Partial<PostData>[]> {
|
|
const posts = await getAllPosts(locale);
|
|
return posts.map((p) => ({
|
|
slug: p.slug,
|
|
frontmatter: p.frontmatter,
|
|
}));
|
|
}
|
|
|
|
/**
 * Returns the neighboring posts (by date order) for prev/next navigation.
 *
 * Posts come from getAllPosts, which sorts newest-first, so the "next"
 * (newer) post sits at currentIndex - 1 and the "prev" (older) post at
 * currentIndex + 1. When the current post sits at either end of the list
 * and more than two posts exist, the missing neighbor is filled with a
 * random other post; isPrevRandom/isNextRandom flag those substitutes so
 * the UI can label them. NOTE: the random fill makes results at the list
 * edges non-deterministic.
 */
export async function getAdjacentPosts(
  slug: string,
  locale: string,
): Promise<{
  prev: PostData | null;
  next: PostData | null;
  isPrevRandom?: boolean;
  isNextRandom?: boolean;
}> {
  const posts = await getAllPosts(locale);
  const currentIndex = posts.findIndex((post) => post.slug === slug);

  // Unknown slug (e.g. a post filtered out of listings): no navigation.
  if (currentIndex === -1) {
    return { prev: null, next: null };
  }

  // Posts are sorted by date descending (newest first)
  // So "next" post (newer) is at index - 1
  // And "previous" post (older) is at index + 1
  let next = currentIndex > 0 ? posts[currentIndex - 1] : null;
  let prev = currentIndex < posts.length - 1 ? posts[currentIndex + 1] : null;

  let isNextRandom = false;
  let isPrevRandom = false;

  // Picks a uniformly random post excluding the given slugs; null when
  // nothing remains after exclusion.
  const getRandomPost = (excludeSlugs: string[]) => {
    const available = posts.filter((p) => !excludeSlugs.includes(p.slug));
    if (available.length === 0) return null;
    return available[Math.floor(Math.random() * available.length)];
  };

  // If there's no next post (we are at the newest post), show a random post instead
  if (!next && posts.length > 2) {
    next = getRandomPost([slug, prev?.slug].filter(Boolean) as string[]);
    isNextRandom = true;
  }

  // If there's no previous post (we are at the oldest post), show a random post instead
  // (excluding the possibly-random "next" chosen above, so prev !== next).
  if (!prev && posts.length > 2) {
    prev = getRandomPost([slug, next?.slug].filter(Boolean) as string[]);
    isPrevRandom = true;
  }

  return { prev, next, isPrevRandom, isNextRandom };
}
|
|
|
|
export function getReadingTime(content: string): number {
|
|
const wordsPerMinute = 200;
|
|
const noOfWords = content.split(/\s/g).length;
|
|
const minutes = noOfWords / wordsPerMinute;
|
|
return Math.ceil(minutes);
|
|
}
|
|
|
|
export function generateHeadingId(text: string): string {
|
|
let id = text.toLowerCase();
|
|
id = id.replace(/ä/g, 'ae');
|
|
id = id.replace(/ö/g, 'oe');
|
|
id = id.replace(/ü/g, 'ue');
|
|
id = id.replace(/ß/g, 'ss');
|
|
|
|
id = id.replace(/[*_`]/g, '');
|
|
id = id.replace(/[^\w\s-]/g, '');
|
|
id = id
|
|
.replace(/\s+/g, '-')
|
|
.replace(/-+/g, '-')
|
|
.replace(/^-+|-+$/g, '');
|
|
|
|
return id || 'heading';
|
|
}
|
|
|
|
export function getTextContent(node: any): string {
|
|
if (typeof node === 'string') return node;
|
|
if (typeof node === 'number') return node.toString();
|
|
if (Array.isArray(node)) return node.map(getTextContent).join('');
|
|
if (node && typeof node === 'object' && node.props && node.props.children) {
|
|
return getTextContent(node.props.children);
|
|
}
|
|
return '';
|
|
}
|
|
|
|
export function getHeadings(content: string): { id: string; text: string; level: number }[] {
|
|
const headingLines = content.split('\n').filter((line) => line.match(/^#{1,3}\s/));
|
|
|
|
return headingLines.map((line) => {
|
|
const level = (line.match(/^#+/)?.[0].length || 0) + 1; // Shift H1->H2, H2->H3, H3->H4
|
|
const rawText = line.replace(/^#+\s/, '').trim();
|
|
const cleanText = rawText.replace(/[*_`]/g, '');
|
|
const id = generateHeadingId(cleanText);
|
|
|
|
return { id, text: cleanText, level };
|
|
});
|
|
}
|
|
|
|
export function extractLexicalHeadings(
|
|
node: any,
|
|
headings: { id: string; text: string; level: number }[] = [],
|
|
): { id: string; text: string; level: number }[] {
|
|
if (!node) return headings;
|
|
|
|
if (node.type === 'heading' && node.tag) {
|
|
const level = parseInt(node.tag.replace('h', ''));
|
|
const text = getTextContentFromLexical(node);
|
|
if (text) {
|
|
headings.push({
|
|
id: generateHeadingId(text),
|
|
text,
|
|
level,
|
|
});
|
|
}
|
|
}
|
|
|
|
if (node.children && Array.isArray(node.children)) {
|
|
node.children.forEach((child: any) => extractLexicalHeadings(child, headings));
|
|
}
|
|
|
|
return headings;
|
|
}
|
|
|
|
function getTextContentFromLexical(node: any): string {
|
|
if (node.type === 'text') {
|
|
return node.text || '';
|
|
}
|
|
if (node.children && Array.isArray(node.children)) {
|
|
return node.children.map(getTextContentFromLexical).join('');
|
|
}
|
|
return '';
|
|
}
|