Fix robots.txt generation (#47625)
### What? Fix robots.txt generation. ### Why? Multiple allow/disallow rules should be supported, and each rule should be emitted on a separate line. ### How? Closes NEXT-905. Fixes #47224 ([link](https://linear.app/vercel/issue/NEXT-905))
This commit is contained in:
parent
54fce53dd9
commit
f6e6c2f01a
3 changed files with 32 additions and 9 deletions
|
@ -38,7 +38,7 @@ describe('resolveRouteData', () => {
|
|||
},
|
||||
{
|
||||
userAgent: 'Googlebot',
|
||||
allow: '/bot',
|
||||
allow: ['/bot', '/bot2'],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
@ -54,7 +54,17 @@ describe('resolveRouteData', () => {
|
|||
rules: { allow: '/' },
|
||||
}
|
||||
|
||||
resolveRobots(data1)
|
||||
expect(resolveRobots(data1)).toMatchInlineSnapshot(`
|
||||
"User-Agent: *
|
||||
Allow: /
|
||||
|
||||
User-Agent: Googlebot
|
||||
Allow: /bot
|
||||
Allow: /bot2
|
||||
|
||||
"
|
||||
`)
|
||||
|
||||
resolveRobots(data2)
|
||||
expect(resolveRobots(data3)).toMatchInlineSnapshot(`
|
||||
"User-Agent: *
|
||||
|
|
|
@ -3,22 +3,28 @@ import type {
|
|||
Sitemap,
|
||||
} from '../../../../lib/metadata/types/metadata-interface'
|
||||
import type { Manifest } from '../../../../lib/metadata/types/manifest-types'
|
||||
import { resolveAsArrayOrUndefined } from '../../../../lib/metadata/generate/utils'
|
||||
import { resolveArray } from '../../../../lib/metadata/generate/utils'
|
||||
|
||||
// convert robots data to txt string
|
||||
export function resolveRobots(data: Robots): string {
|
||||
let content = ''
|
||||
const rules = Array.isArray(data.rules) ? data.rules : [data.rules]
|
||||
for (const rule of rules) {
|
||||
const userAgent = resolveAsArrayOrUndefined(rule.userAgent) || ['*']
|
||||
const userAgent = resolveArray(rule.userAgent || ['*'])
|
||||
for (const agent of userAgent) {
|
||||
content += `User-Agent: ${agent}\n`
|
||||
}
|
||||
if (rule.allow) {
|
||||
content += `Allow: ${rule.allow}\n`
|
||||
const allow = resolveArray(rule.allow)
|
||||
for (const item of allow) {
|
||||
content += `Allow: ${item}\n`
|
||||
}
|
||||
}
|
||||
if (rule.disallow) {
|
||||
content += `Disallow: ${rule.disallow}\n`
|
||||
const disallow = resolveArray(rule.disallow)
|
||||
for (const item of disallow) {
|
||||
content += `Disallow: ${item}\n`
|
||||
}
|
||||
}
|
||||
if (rule.crawlDelay) {
|
||||
content += `Crawl-delay: ${rule.crawlDelay}\n`
|
||||
|
@ -28,8 +34,8 @@ export function resolveRobots(data: Robots): string {
|
|||
if (data.host) {
|
||||
content += `Host: ${data.host}\n`
|
||||
}
|
||||
const sitemap = resolveAsArrayOrUndefined(data.sitemap)
|
||||
if (sitemap) {
|
||||
if (data.sitemap) {
|
||||
const sitemap = resolveArray(data.sitemap)
|
||||
// TODO-METADATA: support injecting sitemap url into robots.txt
|
||||
sitemap.forEach((item) => {
|
||||
content += `Sitemap: ${item}\n`
|
||||
|
|
|
@ -1,3 +1,10 @@
|
|||
function resolveArray<T>(value: T): T[] {
|
||||
if (Array.isArray(value)) {
|
||||
return value
|
||||
}
|
||||
return [value]
|
||||
}
|
||||
|
||||
function resolveAsArrayOrUndefined<T extends unknown | readonly unknown[]>(
|
||||
value: T | T[] | undefined | null
|
||||
): undefined | T[] {
|
||||
|
@ -10,4 +17,4 @@ function resolveAsArrayOrUndefined<T extends unknown | readonly unknown[]>(
|
|||
return [value]
|
||||
}
|
||||
|
||||
export { resolveAsArrayOrUndefined }
|
||||
export { resolveAsArrayOrUndefined, resolveArray }
|
||||
|
|
Loading…
Reference in a new issue