diff --git a/client/src/app/robots.ts b/client/src/app/robots.ts
new file mode 100644
index 0000000..166918e
--- /dev/null
+++ b/client/src/app/robots.ts
@@ -0,0 +1,21 @@
+import { env } from "@/env";
+
+import type { MetadataRoute } from "next";
+
+export default function robots(): MetadataRoute.Robots {
+  if (env.NEXT_USE_RESTRICTIVE_ROBOTS_TXT) {
+    return {
+      rules: {
+        userAgent: "*",
+        disallow: "/",
+      },
+    };
+  }
+
+  return {
+    rules: {
+      userAgent: "*",
+      allow: "/",
+    },
+  };
+}
diff --git a/client/src/env.ts b/client/src/env.ts
index 7ddd6e3..a05251c 100644
--- a/client/src/env.ts
+++ b/client/src/env.ts
@@ -6,7 +6,14 @@ export const env = createEnv({
    * Serverside Environment variables, not available on the client.
    * Will throw if you access these variables on the client.
    */
-  server: {},
+  server: {
+    // If `true` or left empty, crawlers (including search engines) are not allowed to index the
+    // website
+    NEXT_USE_RESTRICTIVE_ROBOTS_TXT: z.preprocess(
+      (value) => (!value || value === "true" ? true : false),
+      z.boolean(),
+    ),
+  },
   /*
    * Environment variables available on the client (and server).
    *
@@ -27,5 +34,6 @@ export const env = createEnv({
   runtimeEnv: {
     NEXT_PUBLIC_MAPBOX_TOKEN: process.env.NEXT_PUBLIC_MAPBOX_TOKEN,
     NEXT_PUBLIC_MAPBOX_STYLE: process.env.NEXT_PUBLIC_MAPBOX_STYLE,
+    NEXT_USE_RESTRICTIVE_ROBOTS_TXT: process.env.NEXT_USE_RESTRICTIVE_ROBOTS_TXT,
   },
 });
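
Note (not part of the diff): a minimal sketch of how the `z.preprocess` callback above maps raw environment values, assuming a standalone helper name `toRestrictive` purely for illustration. Unset, empty, and `"true"` all resolve to the restrictive response from `robots.ts` (`disallow: "/"`); any other non-empty string resolves to the permissive one (`allow: "/"`).

```ts
// Illustrative only — mirrors the preprocess callback added in client/src/env.ts.
// The helper name `toRestrictive` is hypothetical and does not exist in the diff.
const toRestrictive = (value: unknown): boolean =>
  !value || value === "true" ? true : false;

toRestrictive(undefined); // true  -> restrictive robots (disallow: "/")
toRestrictive("");        // true  -> restrictive robots (disallow: "/")
toRestrictive("true");    // true  -> restrictive robots (disallow: "/")
toRestrictive("false");   // false -> permissive robots (allow: "/")
toRestrictive("0");       // false -> any non-empty value other than "true" is permissive
```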