Updated robots file
@@ -1,11 +1,28 @@
+import { getAllUrls } from '@/lib/db/urls'
 import type { MetadataRoute } from 'next'
 
-export default function robots(): MetadataRoute.Robots {
+export default async function robots(): Promise<MetadataRoute.Robots> {
+  const urls = await getAllUrls()
+
+  const crawlableUrls = urls.map(u => {
+    if (u.crawlable) {
+      return `/r/${u.slug}`
+    }
+  }).filter(v => typeof v === 'string')
+
   return {
-    rules: {
-      userAgent: "*",
-      allow: ["/", "/r/"],
-      disallow: "/dashboard/",
-    }
+    rules: [
+      {
+        userAgent: "*",
+        allow: "/",
+        disallow: ["/api", "/dashboard", "/sign-in"],
+        crawlDelay: 1
+      }, {
+        userAgent: "*",
+        disallow: "/r/",
+        allow: crawlableUrls,
+        crawlDelay: 1
+      }
+    ]
   }
 }
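The updated generator depends on a getAllUrls helper from '@/lib/db/urls' that is not part of this diff. Below is a minimal hypothetical sketch of the shape that helper would need to return for the code above to type-check; the field names (slug, crawlable) are inferred from the diff, not taken from the actual implementation:

// Hypothetical sketch of '@/lib/db/urls'; only the fields the diff
// actually reads (slug, crawlable) are assumed here.
export interface UrlRecord {
  slug: string       // identifier used in the /r/<slug> route
  crawlable: boolean // whether the short link may be indexed
}

export async function getAllUrls(): Promise<UrlRecord[]> {
  // Placeholder data standing in for the real database query.
  return [
    { slug: 'abc123', crawlable: true },
    { slug: 'secret', crawlable: false },
  ]
}

With that sample data, crawlableUrls would be ['/r/abc123'], and Next.js should serialize the two rule objects into two separate User-Agent: * groups in the generated robots.txt, the second disallowing /r/ while explicitly allowing each crawlable slug.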