Updated robots file

commit 53a4e5507f
parent 18c6aa1758
Date:  2025-06-27 19:15:06 +02:00


@@ -1,11 +1,28 @@
+import { getAllUrls } from '@/lib/db/urls'
 import type { MetadataRoute } from 'next'
 
-export default function robots(): MetadataRoute.Robots {
-  return {
-    rules: {
-      userAgent: "*",
-      allow: ["/", "/r/"],
-      disallow: "/dashboard/",
-    },
-  }
-}
+export default async function robots(): Promise<MetadataRoute.Robots> {
+  const urls = await getAllUrls()
+  // Collect the /r/ paths that are marked crawlable; the typeof check
+  // filters out the undefined entries the map produces for the rest.
+  const crawlableUrls = urls.map(u => {
+    if (u.crawlable) {
+      return `/r/${u.slug}`
+    }
+  }).filter(v => typeof v === 'string')
+
+  return {
+    rules: [
+      {
+        userAgent: "*",
+        allow: "/",
+        disallow: ["/api", "/dashboard", "/sign-in"],
+        crawlDelay: 1
+      }, {
+        userAgent: "*",
+        disallow: "/r/",
+        allow: crawlableUrls,
+        crawlDelay: 1
+      }
+    ]
+  }
+}
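
For context, a minimal sketch of what the imported helper might look like. The real getAllUrls in @/lib/db/urls is not part of this diff; the ShortUrl interface and the slugs below are hypothetical and only model the two fields the robots route reads (slug and crawlable), with an in-memory stand-in for the actual database query:

```ts
// Hypothetical sketch only: the real module queries the database.
export interface ShortUrl {
  slug: string       // path segment served under /r/<slug>
  crawlable: boolean // whether search engines may index the link
}

export async function getAllUrls(): Promise<ShortUrl[]> {
  // Stand-in data; the project's actual data layer goes here.
  return [
    { slug: 'abc', crawlable: true },
    { slug: 'xyz', crawlable: true },
    { slug: 'secret', crawlable: false },
  ]
}
```

With data like that, Next.js serves this route as /robots.txt, and the generated file would look roughly like this (exact casing and ordering can vary by Next.js version):

```
User-Agent: *
Allow: /
Disallow: /api
Disallow: /dashboard
Disallow: /sign-in
Crawl-delay: 1

User-Agent: *
Disallow: /r/
Allow: /r/abc
Allow: /r/xyz
Crawl-delay: 1
```

Both groups target userAgent "*"; crawlers that follow Google's robots.txt rules merge groups for the same user agent and apply the most specific matching path, so the longer /r/abc and /r/xyz Allow entries take precedence over the shorter /r/ Disallow.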