diff --git a/.gitignore b/.gitignore index da3375271..ddacb179d 100644 --- a/.gitignore +++ b/.gitignore @@ -40,4 +40,7 @@ yarn-error.log* /public/llms-full.txt /public/**/*.md +# Split OpenAPI spec files (generated by scripts/splitOpenApiSpec.ts) +/data/specs/*/resources/ + tsconfig.tsbuildinfo diff --git a/.tool-versions b/.tool-versions index a93e70893..5d7898e38 100644 --- a/.tool-versions +++ b/.tool-versions @@ -1,2 +1,2 @@ -nodejs 20.19.3 +nodejs 24.13.0 yarn 1.22.10 diff --git a/components/api-reference/ApiReferenceLayout.tsx b/components/api-reference/ApiReferenceLayout.tsx new file mode 100644 index 000000000..770cff5ce --- /dev/null +++ b/components/api-reference/ApiReferenceLayout.tsx @@ -0,0 +1,140 @@ +import { useEffect, useRef } from "react"; +import { useRouter } from "next/router"; +import Meta from "@/components/Meta"; +import { Page as TelegraphPage } from "@/components/ui/Page"; +import { Sidebar, SidebarContext } from "@/components/ui/Page/Sidebar"; +import { ContentActions } from "@/components/ui/ContentActions"; +import { SidebarData, SidebarSection } from "@/lib/openApiSpec"; +import { SidebarSection as LegacySidebarSection } from "@/data/types"; + +interface Breadcrumb { + label: string; + href: string; +} + +interface ApiReferenceLayoutProps { + children: React.ReactNode; + sidebarData: SidebarData; + preSidebarContent?: LegacySidebarSection[]; + title: string; + description: string; + breadcrumbs?: Breadcrumb[]; + currentPath?: string; +} + +/** + * Convert new SidebarData format to legacy SidebarSection format + * used by the existing Page components. + */ +function convertToLegacySidebarFormat( + sidebarData: SidebarData, + preSidebarContent: LegacySidebarSection[] = [], +): LegacySidebarSection[] { + const resourceSections: LegacySidebarSection[] = sidebarData.resources.map( + (resource: SidebarSection) => ({ + title: resource.title, + slug: resource.slug, + pages: [ + { slug: "/", title: "Overview" }, + ...resource.pages.map((page) => ({ + slug: page.slug.replace(resource.slug, ""), + title: page.title, + // subPage slugs need to be relative to their parent page, not the resource + pages: page.pages?.map((subPage) => ({ + slug: subPage.slug.replace(page.slug, ""), + title: subPage.title, + })), + })), + ], + }), + ); + + return [...preSidebarContent, ...resourceSections]; +} + +export function ApiReferenceLayout({ + children, + sidebarData, + preSidebarContent = [], + title, + description, + breadcrumbs, + currentPath, +}: ApiReferenceLayoutProps) { + const router = useRouter(); + const scrollerRef = useRef(null); + + useEffect(() => { + const prefetch = router.prefetch; + router.prefetch = async () => {}; + + return () => { + router.prefetch = prefetch; + }; + }, [router]); + + const basePath = router.pathname.split("/")[1]; + const canonicalPath = currentPath || `/${basePath}`; + + const sidebarContent = convertToLegacySidebarFormat( + sidebarData, + preSidebarContent, + ); + + // For per-resource pages, currentPath is the resource base path (e.g., /api-reference/users) + // This enables same-page routing for links within the current resource + const sidebarContextValue = { + samePageRouting: true, + currentResourcePath: currentPath, + }; + + return ( + + + + + + } + /> + + + + + {sidebarContent.map((section) => ( + + ))} + + + + + {breadcrumbs && breadcrumbs.length > 0 && ( + + )} + + } + /> + {children} + + + + ); +} + +export default ApiReferenceLayout; diff --git a/components/api-reference/ResourceFullPage.tsx 
b/components/api-reference/ResourceFullPage.tsx new file mode 100644 index 000000000..6bb3954c5 --- /dev/null +++ b/components/api-reference/ResourceFullPage.tsx @@ -0,0 +1,28 @@ +import { SplitApiReferenceProvider } from "@/components/ui/ApiReference/ApiReferenceContext"; +import { ApiReferenceSection } from "@/components/ui/ApiReference"; +import { SplitResourceData } from "@/lib/openApiSpec"; +import { useInitialScrollState } from "@/components/ui/Page/helpers"; + +interface ResourceFullPageProps { + data: SplitResourceData; + basePath: string; +} + +/** + * Renders a full resource page with all methods, schemas, and subresources. + * Uses split resource data for optimal page size. + */ +export function ResourceFullPage({ data, basePath }: ResourceFullPageProps) { + // Handle scroll to hash on initial load + useInitialScrollState(); + + return ( + + + + ); +} diff --git a/components/api-reference/index.ts b/components/api-reference/index.ts new file mode 100644 index 000000000..98d4480b0 --- /dev/null +++ b/components/api-reference/index.ts @@ -0,0 +1,2 @@ +export { ApiReferenceLayout } from "./ApiReferenceLayout"; +export { ResourceFullPage } from "./ResourceFullPage"; diff --git a/components/ui/Accordion.tsx b/components/ui/Accordion.tsx index 0c4646c93..fddf7d2d8 100644 --- a/components/ui/Accordion.tsx +++ b/components/ui/Accordion.tsx @@ -8,7 +8,7 @@ import { ChevronRight } from "lucide-react"; const AccordionGroup = ({ children }) => (
{children} @@ -37,8 +37,7 @@ const Accordion = ({ onClick={() => setOpen(!open)} aria-controls={title + "Children"} aria-expanded={open} - py="8" - px="8" + p="6" w="full" justifyContent="flex-start" alignItems="center" @@ -54,10 +53,10 @@ const Accordion = ({ flexShrink: 0, }} /> - + + // Use basePath to ensure the mdPath stays at the root (e.g., /api-reference.md) + // regardless of scroll-based URL changes + } /> diff --git a/components/ui/ApiReference/ApiReferenceContext.tsx b/components/ui/ApiReference/ApiReferenceContext.tsx index 6a6391205..52eab55e6 100644 --- a/components/ui/ApiReference/ApiReferenceContext.tsx +++ b/components/ui/ApiReference/ApiReferenceContext.tsx @@ -1,6 +1,6 @@ -import { createContext, useContext, ReactNode } from "react"; +import { createContext, useContext, ReactNode, useMemo } from "react"; import { OpenAPIV3 } from "@scalar/openapi-types"; -import { StainlessConfig } from "../../../lib/openApiSpec"; +import { StainlessConfig, SplitResourceData } from "../../../lib/openApiSpec"; import { buildSchemaReferences } from "./helpers"; import { useRouter } from "next/router"; @@ -11,10 +11,37 @@ interface ApiReferenceContextType { schemaReferences: Record; } +/** + * Context type for split resource data. + * Uses a minimal OpenAPI document structure with only the paths and schemas needed. + */ +interface SplitApiReferenceContextType { + openApiSpec: OpenAPIV3.Document; + baseUrl: string; + schemaReferences: Record; +} + const ApiReferenceContext = createContext( undefined, ); +const SplitApiReferenceContext = createContext< + SplitApiReferenceContextType | undefined +>(undefined); + +/** + * Lightweight context that only provides schemaReferences and baseUrl. + * Used by multi-page API reference components. + */ +interface LightweightContextType { + schemaReferences: Record; + baseUrl: string; +} + +const LightweightContext = createContext( + undefined, +); + interface ApiReferenceProviderProps { children: ReactNode; openApiSpec: OpenAPIV3.Document; @@ -46,14 +73,111 @@ export function ApiReferenceProvider({ ); } +/** + * Provider for split resource data. + * Converts split data into a minimal OpenAPI document structure that components can use. + */ +interface SplitApiReferenceProviderProps { + children: ReactNode; + data: SplitResourceData; +} + +export function SplitApiReferenceProvider({ + children, + data, +}: SplitApiReferenceProviderProps) { + // Build a minimal OpenAPI document from the split data + const openApiSpec = useMemo(() => { + return { + openapi: "3.0.0", + info: { title: "", version: "" }, + paths: data.paths, + components: { + schemas: data.schemas, + }, + }; + }, [data.paths, data.schemas]); + + return ( + + {children} + + ); +} + +/** + * Lightweight provider for multi-page API reference that only needs + * schemaReferences and baseUrl (without loading full specs). + */ +interface LightweightApiReferenceProviderProps { + children: ReactNode; + schemaReferences: Record; + baseUrl: string; +} + +export function LightweightApiReferenceProvider({ + children, + schemaReferences, + baseUrl, +}: LightweightApiReferenceProviderProps) { + return ( + + {children} + + ); +} + +/** + * Hook that returns the API reference context. + * Works with both the full ApiReferenceProvider and SplitApiReferenceProvider. 
+ */ export function useApiReference() { - const context = useContext(ApiReferenceContext); - if (context === undefined) { - throw new Error( - "useApiReference must be used within an ApiReferenceProvider", - ); + const fullContext = useContext(ApiReferenceContext); + const splitContext = useContext(SplitApiReferenceContext); + + if (fullContext) { + return fullContext; + } + + if (splitContext) { + return splitContext; } - return context; + + throw new Error( + "useApiReference must be used within an ApiReferenceProvider or SplitApiReferenceProvider", + ); +} + +/** + * Hook that returns schemaReferences from any of the available contexts. + * Use this in components that only need schemaReferences. + */ +export function useSchemaReferences(): Record { + const fullContext = useContext(ApiReferenceContext); + const splitContext = useContext(SplitApiReferenceContext); + const lightweightContext = useContext(LightweightContext); + + if (fullContext) { + return fullContext.schemaReferences; + } + + if (splitContext) { + return splitContext.schemaReferences; + } + + if (lightweightContext) { + return lightweightContext.schemaReferences; + } + + throw new Error( + "useSchemaReferences must be used within an ApiReferenceProvider, SplitApiReferenceProvider, or LightweightApiReferenceProvider", + ); } export default ApiReferenceContext; diff --git a/components/ui/ApiReference/ApiReferenceMethod/ApiReferenceMethod.tsx b/components/ui/ApiReference/ApiReferenceMethod/ApiReferenceMethod.tsx index 102d06b9f..dd327a1f9 100644 --- a/components/ui/ApiReference/ApiReferenceMethod/ApiReferenceMethod.tsx +++ b/components/ui/ApiReference/ApiReferenceMethod/ApiReferenceMethod.tsx @@ -4,7 +4,7 @@ import Markdown from "react-markdown"; import { Callout } from "@/components/ui/Callout"; import RateLimit from "@/components/ui/RateLimit"; -import { Box } from "@telegraph/layout"; +import { Box, Stack } from "@telegraph/layout"; import { Code, Heading } from "@telegraph/typography"; import { AnimatePresence, motion } from "framer-motion"; import { ContentColumn, ExampleColumn, Section } from "../../ApiSections"; @@ -67,186 +67,159 @@ function ApiReferenceMethod({ return (
+ {method.description ?? ""} + {isBeta && ( + + This endpoint is currently in beta. If you'd like early + access, or this is blocking your adoption of Knock, please{" "} + + get in touch + + . + + } + /> + )} + + } isIdempotent={isIdempotent} isRetentionSubject={isRetentionSubject} path={path} mdPath={mdPath} > - {method.description ?? ""} - {isBeta && ( - - This endpoint is currently in beta. If you'd like early access, - or this is blocking your adoption of Knock, please{" "} - - get in touch - - . - - } - /> - )} - - - Endpoint - - - - - {rateLimit && ( - - - Rate limit + + + + Endpoint - - - )} + + + + {rateLimit && ( + + + + )} + + + + {pathParameters.length > 0 && ( + + + Path parameters + + + + )} - {pathParameters.length > 0 && ( - <> - - Path parameters - - - - )} + {queryParameters.length > 0 && ( + + + Query parameters + + + + )} - {queryParameters.length > 0 && ( - <> - - Query parameters - - - - )} + {requestBody && ( + + + Request body + + + + )} - {requestBody && ( - <> - - Request body + + + Returns - - - )} - - Returns - - - {responseSchemas.length > 0 && - responseSchemas.map((responseSchema) => ( - - - - - {responseSchema.title} - - - - {responseSchema.description ?? ""} - - - {responseSchema.properties && ( - <> - setIsResponseExpanded(!isResponseExpanded)} - > - {isResponseExpanded - ? "Hide properties" - : "Show properties"} - - - - 0 && + responseSchemas.map((responseSchema) => ( + + + + - - - - - - + {responseSchema.title} + + + + {responseSchema.description ?? ""} + + + {responseSchema.properties && ( + <> + + setIsResponseExpanded(!isResponseExpanded) + } + > + {isResponseExpanded + ? "Hide properties" + : "Show properties"} + + + + + + + + + + + )} + + + ))} + + {responseSchemas.length === 0 && ( + + {formatResponseStatusCodes(method).map( + (formattedStatus, index) => ( + + {formattedStatus} + + ), )} - - - ))} - - {responseSchemas.length === 0 && ( - - {formatResponseStatusCodes(method).map((formattedStatus, index) => ( - - {formattedStatus} - - ))} - - )} + + )} + + -
+
{resource.description} + } + path={basePath} + mdPath={resourceMdPath} + > - {resource.description && ( - {resource.description} - )} - - {Object.entries(methods).length > 0 && ( {Object.entries(methods).map( @@ -66,7 +68,7 @@ function ApiReferenceSection({ resourceName, resource, path }: Props) { )} )} - +
@@ -124,25 +126,19 @@ function ApiReferenceSection({ resourceName, resource, path }: Props) {
{schema.description} + } path={schemaPath} mdPath={schemaMdPath} > - {schema.description && ( - {schema.description} - )} - - - Attributes - - + + + Attributes + + + ( ); const Wrapper = ({ children }) => { - return {children}; + return ( + + {children} + + ); }; const Container = ({ children }) => { return ( {children} diff --git a/components/ui/ApiReference/SchemaProperties/SchemaProperty.tsx b/components/ui/ApiReference/SchemaProperties/SchemaProperty.tsx index f27e569e3..dd2e42ed6 100644 --- a/components/ui/ApiReference/SchemaProperties/SchemaProperty.tsx +++ b/components/ui/ApiReference/SchemaProperties/SchemaProperty.tsx @@ -10,7 +10,7 @@ import { resolveChildProperties, hydrateRequiredChildProperties, } from "./helpers"; -import { useApiReference } from "../ApiReferenceContext"; +import { useSchemaReferences } from "../ApiReferenceContext"; import { Stack } from "@telegraph/layout"; import { Text } from "@telegraph/typography"; import { AnimatePresence, motion } from "framer-motion"; @@ -21,7 +21,7 @@ type Props = { }; const SchemaProperty = ({ name, schema }: Props) => { - const { schemaReferences } = useApiReference(); + const schemaReferences = useSchemaReferences(); const [isPossibleTypesOpen, setIsPossibleTypesOpen] = useState(false); const [isChildPropertiesOpen, setIsChildPropertiesOpen] = useState(false); // If the schema is an array, then we want to show the possible types that the array can contain. diff --git a/components/ui/ApiSections.tsx b/components/ui/ApiSections.tsx index b774e2c0b..708780bbb 100644 --- a/components/ui/ApiSections.tsx +++ b/components/ui/ApiSections.tsx @@ -4,21 +4,28 @@ import { Text } from "@telegraph/typography"; import { highlightResource } from "./Page/helpers"; import Link from "next/link"; import { ContentActions } from "./ContentActions"; +import { Tag } from "@telegraph/tag"; export const Section = ({ title, + description, children, isIdempotent = false, isRetentionSubject = false, path = undefined, mdPath, + slug: _slug, // Explicitly destructure to prevent passing to DOM + direction: _direction, // Some sections pass this, prevent passing to DOM }: { title?: string; + description?: React.ReactNode; children: React.ReactNode; isIdempotent?: boolean; isRetentionSubject?: boolean; path?: string; - mdPath?: string; // New prop type + mdPath?: string; + slug?: string; + direction?: string; }) => { const onRetentionClick = (e: React.MouseEvent) => { e.preventDefault(); @@ -42,70 +49,46 @@ export const Section = ({ py="16" data-resource-path={path} > - {title && ( - <> - {isIdempotent && ( - - - - Idempotent - - - - )} - {isRetentionSubject && ( - - - - Retention policy applied - - - - )} - - - {title} - - {mdPath && } - - - )} + + {isIdempotent && ( + + + Idempotent + + + )} + {isRetentionSubject && ( + + + Retention policy applied + + + )} + + + + + {title} + + {description && {description}} + + {mdPath && {}} + @@ -116,22 +99,19 @@ export const Section = ({ }; export const ContentColumn = ({ children }) => ( - - {children} + + {children} ); export const ExampleColumn = ({ children }) => ( {children} diff --git a/components/ui/Autocomplete.tsx b/components/ui/Autocomplete.tsx index 20fd9710b..f11c398c3 100644 --- a/components/ui/Autocomplete.tsx +++ b/components/ui/Autocomplete.tsx @@ -645,30 +645,28 @@ const Autocomplete = () => { /> } TrailingComponent={ - <> - {autocompleteState?.query ? ( -
+
@@ -457,4 +458,5 @@ Knock uses standard [HTTP response codes](https://developer.mozilla.org/en-US/We />
+
diff --git a/content/__cli/content.mdx b/content/__cli/content.mdx deleted file mode 100644 index bcb76c5b2..000000000 --- a/content/__cli/content.mdx +++ /dev/null @@ -1,2857 +0,0 @@ ---- -title: CLI reference -description: Learn more about the commands and flags available in the Knock CLI. -tags: ["cli", "command line", "cmd", "command-line", "terminal"] -layout: cli -Section: CLI ---- - - - -
- - -This reference documents every command and flag available in Knock's command-line interface. - -The Knock CLI helps you work with your Knock resources right from the terminal. - -With the CLI, you can: - -- Work with your Knock workflows and notification templates locally. -- Integrate Knock into your CI/CD environment to automatically promote changes. -- Map your translation files into Knock to localize your notifications. - - -
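For example, once changes have been committed, a CI pipeline step could promote them to production using a service token (the `KNOCK_SERVICE_TOKEN` environment variable below is an assumed name for however your CI stores that token):

```bash title="Example CI promotion step"
# Promote all committed changes to the production environment
knock commit promote --to=production --service-token=$KNOCK_SERVICE_TOKEN
```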
- -
- - -**Install with Homebrew** - -For macOS, you can install the Knock CLI using [Homebrew](https://brew.sh/). Once the CLI is installed you can call it by using the `knock` command in your terminal. - -**Install with npm** - -For other operating systems, you can install the Knock CLI using `npm`, a node package manager. Once the CLI is installed, you can call it by using the `knock` command in your terminal. - -**Requirements** - -The Knock CLI is built with Node.js and installable as a `npm` package. You must have `node` and `npm` installed already, with the following versions: - -- Node.js: 16.14.0 or higher -- NPM: 7.18.1 or higher - -You can find the Knock CLI npm package [here](https://www.npmjs.com/package/@knocklabs/cli). - - - - -```bash title="Install the Knock CLI with homebrew" -brew install knocklabs/tap/knock -``` - -```bash title="Install the Knock CLI with npm" -npm install -g @knocklabs/cli -``` - - -
- -
- - -**Using your Knock account** - -You can authenticate your Knock account against the CLI by running `knock login`. This will open a browser window where you can sign in to your Knock account and authorize the CLI to access your account. - -Once authenticated, you can verify it works by running `knock whoami`. If your account is valid and configured properly, you'll receive a 200 response that shows the account name and your user ID. - - - Using your Knock account against the CLI will inherit the permissions of - the user that is logged in on the account you authorized. - - } -/> - -If you need to switch between accounts, you can run `knock logout` to log out of your current account and log in to a different one. - -**Using a service token** - -If you need to authenticate in a remote environment, or want complete control, you can generate a [service token](/developer-tools/service-tokens) in the Knock dashboard. You can specify a service token in all CLI calls, or you can optionally use a configuration file to authenticate all requests. - -Once you have generated a service token, you can verify it works by running `knock whoami --service-token=YOUR_SERVICE_TOKEN`. If your token is valid and configured properly, you'll receive a 200 response that shows the account name and the service token name. - -**Setting up a configuration file (optional)** - -A service token is required by the CLI for most commands. For convenience, Knock CLI supports a user configuration file, where you can store the service token for the CLI to read automatically rather than having to manually pass in with `--service-token` flag for every command. - -To set up a user configuration file, create a `config.json` file in the Knock CLI's config directory at `~/.config/knock` (macOS/Unix) or `%LOCALAPPDATA%\knock` (Windows), and add the following json: - -```json title="config.json" -{ - "serviceToken": "YOUR_SERVICE_TOKEN" -} -``` - -When Knock CLI detects a user configuration file, it will use the service token provided in it automatically. - - - - -```bash title="Log in to your Knock account" -knock login -``` - -```bash title="Verify your service token" -knock whoami --service-token=XXX -``` - - -
- -
- - -The following flags are supported for every command. - -**Flags** - - - - - - -
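For example, a service token can be passed explicitly to any command:

```bash title="Passing a service token to a command"
knock workflow list --service-token=YOUR_SERVICE_TOKEN
```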
- -
- - You can configure your Knock project by creating a `knock.json` file or by using the `knock init` command to generate one for you. This - file is a project-level configuration file that tells the Knock CLI where to - find your Knock resources. - - For example, if you want to store your Knock resources in the `.knock/` directory, you can create a `knock.json` file with the following content: - -{/* prettier-ignore */} -```json title="Example knock.json file" -{ - "knockDir": ".knock/" -} -``` - - Once you have created the `knock.json` file, all subsequent `knock pull` and `knock push` commands will use the `.knock/` directory as the default target directory relative to the location of the `knock.json` file, regardless of the directory you are currently in. - - If you need to specify a different target directory for a single command, you can use the `--knock-dir` flag, or the `--{resource-type}-dir` flag for specific resource types. - - -
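For example, to pull all workflows into a directory other than the one configured in `knock.json`:

```bash title="Overriding the configured directory for one command"
knock workflow pull --all --workflows-dir=./custom-workflows
```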
- -
- - -There is no required directory structure when working with Knock resources locally. However, if you use the `knock pull` or `knock push` commands, they will produce and expect the directory structure outlined below. - -For forward compatibility, we recommend using this structure to ensure your local files work seamlessly with future CLI updates. - -When you use `knock pull`, resources will be grouped by resource type within subdirectories. The following directory structure will be created: - -{/* prettier-ignore */} - -{`./knock/ -├── guides/ -├── layouts/ -├── message-types/ -├── partials/ -├── translations/ -└── workflows/`} - - -Each resource type has its own directory structure, which is described in detail in the sections below for each resource type. - - -
- -
- - -Initializes a new Knock project by creating a `knock.json` file in the current working directory. - -### Flags - - - - - - - - -```bash title="Initialize a new Knock project" -knock init -``` - - -
- -
- - -You can log in to your Knock account with the `login` command. This will open a browser window where you can sign in to your Knock account and authorize the CLI to access your account. - - - - -```bash title="Log in to your Knock account" -knock login -``` - - -
- -
- - -You can log out of your Knock account with the `logout` command. This will clear the authentication token for the CLI. - - - - -```bash title="Log out of your Knock account" -knock logout -``` - - -
- -
- - -Pulls the contents of all Knock resources (workflows, partials, email layouts, translations, guides, and message-types) from Knock into your local file system. - -Resources will be grouped by resource type within subdirectories of the target directory path set either by your `knock.json` file or by the `--knock-dir` flag. See the [Directory structure](/cli/overview/directory-structure) section for details on the directory structure used by `push` and `pull` commands. - -### Flags - - - - - - - - - - - - -```bash title="Pull all resources" -knock pull --knock-dir=./knock -``` - - -
- -
- - -Pushes all local resource files (workflows, partials, email layouts, and translations) back to Knock and upserts them. - -Resources will be pushed from the target directory path set either by your `knock.json` file or by the `--knock-dir` flag. See the [Directory structure](/cli/overview/directory-structure) section for details on the directory structure used by `push` and `pull` commands. - -### Flags - - - - - - - - -```bash title="Push all resources" -knock push --knock-dir=./knock -``` - -
- -
- - -Lists all environments in your Knock account. You can paginate the results using the `--after` and `--before` flags. - -### Flags - - - - - - - - - - - -```bash title="List all environments" -knock environment list -``` - - -
- -
- - -Lists all channels in your Knock account. You can paginate the results using the `--after` and `--before` flags. - -### Flags - - - - - - - - - - - -```bash title="List all channels" -knock channel list -``` - - -
- -
- - -[Branches](/version-control/branches) are a way to isolate changes to your Knock resources. A branch acts as a sandbox, allowing you to modify your resources without affecting the main branch (your development environment) or other branches in your account. - - - Branches are currently in beta. If you'd like early access, or this is - blocking your adoption of Knock, please{" "} - get in touch. - - } -/> - -
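Putting the branch commands below together, a typical flow might look something like this (assuming that, while a branch is active, `knock push` applies your changes to that branch rather than to the development environment):

```bash title="Example branch workflow"
# Create a new branch and switch to it
knock branch switch my-branch --create

# Push your local resource changes
knock push --knock-dir=./knock

# Merge the branch back into the development environment
knock branch merge my-branch
```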
- -
- - -Lists all branches within your Knock account. You can paginate the results using the `--after` and `--before` flags. - -### Flags - - - - - - - - - -```bash title="List all branches" -knock branch list -``` - - -
- -
- - -Creates a new branch with the given slug. If the branch already exists, the command will return an error. - -### Flags - - - - - - - - -```bash title="Create a new branch" -knock branch create my-branch -``` - - -
- -
- - -Deletes the branch with the given slug. - -Deleting a branch is a permanent operation and cannot be undone.} -/> - -### Flags - - - - - - - - -```bash title="Delete a branch" -knock branch delete my-branch -``` - - -
- -
- - -Switches to an existing branch with the given slug. - -Switching to a branch updates the `.knockbranch` file in your directory, and persists until you exit the branch with the `knock branch exit` command. - -### Flags - - - - - - - - -```bash title="Switch to a branch" -knock branch switch my-branch -``` - -```bash title="Switch to a branch and create it if it doesn't exist" -knock branch switch my-branch --create -``` - -
- -
- - -Exits the current branch by updating the `.knockbranch` file in your directory to the main branch. - - - - -```bash title="Exit a branch" -knock branch exit -``` - - -
- -
- - -Merges a branch into the development environment. By default, the branch will be deleted after merging. - -### Flags - - - - - - - - - -```bash title="Merge a branch" -knock branch merge my-branch -``` - -```bash title="Merge a branch without deleting it" -knock branch merge my-branch --no-delete -``` - - -
- -
- - -When workflows are pulled from Knock, they are stored in directories named by their workflow key. In addition to a `workflow.json` file that describes all of a given workflow's steps, each workflow directory also contains individual folders for each of the [channel steps](/designing-workflows/channel-step) in the workflow that hold additional content and formatting data. - -{/* prettier-ignore */} - -{`workflows/ -└── my-workflow/ - ├── email_1/ - │ ├── visual_blocks/ - │ │ └── 1.content.md - │ └── visual_blocks.json - ├── in_app_feed_1/ - │ └── markdown_body.md - └── workflow.json`} - - -If you're migrating your local workflow files into Knock, you can arrange them using the example file structure above and then push them into Knock with a single command using [`knock workflow push --all`](/cli/workflow/push). Each `workflow.json` file should follow the structure defined [here](/mapi-reference/workflows/schemas/workflow). - - -
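As a rough sketch of the shape, a minimal `workflow.json` with a single in-app feed step might look like the following (field names and paths here are illustrative only; see the workflow schema linked above for the authoritative structure):

```json title="Illustrative workflow.json sketch"
{
  "key": "my-workflow",
  "name": "My workflow",
  "steps": [
    {
      "ref": "in_app_feed_1",
      "type": "channel",
      "channel_key": "in-app-feed",
      "template": {
        "markdown_body@": "in_app_feed_1/markdown_body.md"
      }
    }
  ]
}
```

The `@` suffix indicates that the field's value is read from the referenced file rather than being inlined in the JSON, following the same convention used by the layout example later in this reference.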
- -
- - -You can see all your existing workflows in a given environment with the workflow list command. - -Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - - - - -```bash title="Basic usage" -knock workflow list -``` - -```bash title="Pagination example" -knock workflow list --after=xxx -``` - - -
- -
- - -You can show more details about a given workflow with the `workflow get` command, followed by the target workflow key. - -Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - -```bash title="Basic usage" -knock workflow get my-workflow -``` - -```bash title="Get workflow in a different environment" -knock workflow get my-workflow --environment=production -``` - - -
- -
- - -Create a new workflow with a minimal configuration. You can either select steps interactively or use a template to scaffold the workflow. - -The command will create a new workflow directory in your local file system. By default, this will be in the workflows resource directory set by your `knock.json` file, or the current working directory if not configured. - -### Flags - - - - - - - - - - - - - - - -```bash title="Create a workflow interactively" -knock workflow new -``` - -```bash title="Create a workflow with a specific key" -knock workflow new --key=my-workflow -``` - -```bash title="Create a workflow with specific steps" -knock workflow new --key=my-workflow --steps=email,sms,in_app_feed -``` - -```bash title="Create a workflow from a template" -knock workflow new --key=my-workflow --template=workflows/digest-email -``` - -```bash title="Create and push a workflow" -knock workflow new --key=my-workflow --steps=email --push -``` - - -
- -
- - -You can pull and download workflows along with their message templates from Knock to a local file system with the `workflow pull` command. Knock CLI will create a new workflow directory or update the existing workflow directory in the local file system. - -By default this command will resolve to the workflows resource directory via your `knock.json` file. When not set, it will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--workflows-dir` flag. - -Note: if pulling the target workflow for the first time (or all workflows), Knock CLI will ask to confirm before writing to the local file system. - -See the [Workflow file structure](/cli/workflow/file-structure) section for details on how workflow files are organized. - -### Flags - - - - - - - - -```bash title="Basic usage" -knock workflow pull my-workflow -``` - -```bash title="Pulling a workflow in a different environment" -knock workflow pull my-workflow --environment=production -``` - -```bash title="Pulling all workflows into ./workflows directory" -knock workflow pull --all --workflows-dir=./workflows -``` - -
- -
- - -You can push and upload a workflow directory to Knock with the `workflow push` command. Knock will update an existing workflow by the matching workflow key, or create a new workflow if it does not exist yet. - -By default this command will resolve to the workflows resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--workflows-dir` flag. - -Note: - -- The `workflow push` command only pushes workflows into the `development` environment. -- You must be directly above the target workflow directory when running the `workflow push` command, so the CLI can locate the `workflow.json` file. -- You can also pass in the `--commit` flag (with an optional `--commit-message` flag) to commit the upserted changes immediately. - -See the [Workflow file structure](/cli/workflow/file-structure) section for details on how workflow files are organized. - -### Flags - - - - - - - - - - - -```bash title="Basic usage" -knock workflow push my-workflow -``` - -```bash title="Pushing a workflow and committing with a message" -knock workflow push my-workflow \ - --commit \ - -m "Commit message" -``` - -```bash title="Pushing all workflows from ./workflows directory" -knock workflow push --all --workflows-dir=./workflows -``` - - -
- -
- - -You can run a workflow with the `workflow run` command. Knock will execute a run for the latest saved version of the workflow it finds with the given key and parameters you send it. - -Note: - -- Changes to the local version of the workflow in your file system will not be reflected in a workflow run; it will use the current version that is stored in Knock. - -### Flags - - - - - - - - - - - - - - -```bash title="Basic usage" -knock workflow run my-workflow \ - --environment=production \ - --recipients=ellie -``` - - -
- -
- - -You can validate a new or updated workflow directory with the `workflow validate` command. Knock will validate the given workflow payload in the same way as it would with the `workflow push` command, except without persisting those changes. - -Note: Validating a workflow is only done against the `development` environment. - -### Flags - - - - - - - - - -```bash title="Basic usage" -knock workflow validate my-workflow -``` - - -
- -
- - -You can activate or deactivate a workflow in a given environment with the `workflow activate` command. - -Note: - -- This immediately enables or disables a workflow in a given environment without needing to go through environment promotion. -- By default, this command activates a given workflow. Pass in the `--status` flag with `false` in order to deactivate it. - -### Flags - - - - - - - - - - -```bash title="Basic usage" -knock workflow activate my-workflow \ - --environment=development -``` - -```bash title="Deactivating a workflow" -knock workflow activate my-workflow \ - --environment=development \ - --status=false -``` - - -
- -
- - -Generate type definitions for workflow trigger data from your workflow schemas. This command fetches workflows with trigger data schemas and generates type-safe definitions for TypeScript, Python, Ruby, and Go. - -The generated types enable compile-time safety when triggering workflows in your application code, helping catch integration errors before runtime. The target language is inferred from the output file extension. - -Learn more about [type safety with workflows](/developer-tools/type-safety). - -### Flags - - - - - - - - - -```bash title="Generate TypeScript types" -knock workflow generate-types \ - --output-file=./types/knock-workflows.ts -``` - -```bash title="Generate Python types" -knock workflow generate-types \ - --output-file=./types/knock_workflows.py -``` - -```bash title="Generate Ruby types" -knock workflow generate-types \ - --output-file=./types/knock_workflows.rb -``` - -```bash title="Generate Go types" -knock workflow generate-types \ - --output-file=./types/knock_workflows.go -``` - -```bash title="Generate from production environment" -knock workflow generate-types \ - --environment=production \ - --output-file=./types/knock-workflows.ts -``` - - -
- -
- - -When email layouts are pulled from Knock, they are stored in directories named by their layout key. - -{/* prettier-ignore */} - -{`layouts/ -├── default/ -│ ├── html_layout.html -│ ├── layout.json -│ └── text_layout.txt -└── custom-layout/ - ├── html_layout.html - ├── layout.json - └── text_layout.txt`} - - -If you're migrating your local layout files into Knock, you can arrange them using the example file structure above and then push them into Knock with a single command using [`knock layout push --all`](/cli/email-layout/push). Each `layout.json` file should follow the example shown below; additional information on the Layout structure is defined [here](/mapi-reference/email_layouts/schemas/email_layout). - -```json title="Local layout file example JSON" -{ - "key": "custom-layout", - "name": "Custom Layout", - "html_layout@": "html_layout.html", - "text_layout@": "text_layout.txt", - "footer_links": [{ "text": "My link", "url": "https://example.com" }] -} -``` - - -
- -
- - -List all email layouts in the environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - - - - -```bash title="Basic usage" -knock layout list -``` - - -
- -
- - -Fetches a single email layout, using the `key` of the email layout. - -Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - -```bash title="Get layout" -knock layout get default -``` - -```bash title="Get layout in a different environment" -knock layout get default --environment=production -``` - - -
- -
- - -Create a new email layout with a minimal configuration. - -The command will create a new layout directory in your local file system. By default, this will be in the layouts resource directory set by your `knock.json` file, or the current working directory if not configured. - -### Flags - - - - - - - - - - - - - -```bash title="Create a layout interactively" -knock layout new -``` - -```bash title="Create a layout with a specific key and name" -knock layout new --key=my-layout --name="My Layout" -``` - -```bash title="Create and push a layout" -knock layout new --key=my-layout --push -``` - - -
- -
- - -Pulls the contents of one or all email layouts from Knock into your local file system. Using `` you can pull a single email layout specified by the key, or use the `--all` flag to pull all email layouts from Knock at once. - -By default this command will resolve to the email layouts resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--email-layouts-dir` flag. - -See the [Layout file structure](/cli/email-layout/file-structure) section for details on how layout files are organized. - -### Flags - - - - - - - - - - - -```bash title="Pull a single email layout" -knock layout pull default -``` - -```bash title="Pull all email layouts" -knock layout pull --all -``` - - -
- -
- - -Pushes local email layouts back to Knock and upserts them. Using `` you can push a single email layout specified by the key, or use the `--all` flag to push all email layouts from Knock at once. - -By default this command will resolve to the email layouts resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--email-layouts-dir` flag. - -See the [Layout file structure](/cli/email-layout/file-structure) section for details on how layout files are organized. - -### Flags - - - - - - - - - - - -```bash title="Push a single email layout" -knock layout push my-layout -``` - -```bash title="Push all email layouts" -knock layout push --all -``` - - -
- -
- - -Validates one or more email layouts. Useful for checking if the layout is valid before running the `layout push` command. - -The `` can be provided to validate a single email layout, or you can use the `--all` flag to validate all email layouts. - -Can only be validated against the `development` environment. - -### Flags - - - - - - - -```bash title="Basic usage" -knock layout validate --all -``` - -```bash title="Validate a single email layout" -knock layout validate default -``` - -
- -
- - -When translations are pulled from Knock, they are stored in directories named by their locale codes. Their filename will be their locale code. Any namespaced translations will prepend the namespace to the filename, with `.` used as a separator. - -{/* prettier-ignore */} - -{`translations/ -├── en/ -│ ├── en.json -│ └── admin.en.json -└── en-GB/ - ├── en-GB.json - └── tasks.en-GB.json`} - - -If you're migrating your local translation files into Knock, you can arrange them using the file structure above and then push them into Knock with a single command using [`knock translation push --all`](/cli/translation/push). Each `.json` or `..json` file should follow the structure defined [here](/mapi-reference/translations/schemas/translation). - - -
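As a simple illustration, a translation file is just a JSON object mapping translation keys to localized strings, so an `en.json` might contain something like:

```json title="Illustrative en.json"
{
  "welcome_message": "Welcome to our app!",
  "comment_notification": "You have a new comment."
}
```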
- -
- - -List all translations in the environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - - - - -```bash title="Basic usage" -knock translation list -``` - - -
- -
- - -You can show the content of a given translation with the `translation get` command, followed by the target translation ref. - -The `` is an identifier string that refers to a unique translation file. If a translation has no namespace, it is the same as the locale, e.g. `en`. If namespaced, it is formatted as `namespace.locale`, e.g. `admin.en`. - -Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - -```bash title="Get translation without a namespace" -knock translation get en -``` - -```bash title="Get translation with a namespace" -knock translation get admin.en -``` - -```bash title="Get translation in a different environment" -knock translation get en --environment=production -``` - -
- -
- - -You can pull and download translation files from Knock to a local file system with the `translation pull` command. Knock CLI will create a new translation file or update the existing file in the local file system. - -The `` is an identifier string that refers to a unique translation file. If a translation has no namespace, it is the same as the locale, e.g. `en`. If namespaced, it is formatted as `namespace.locale`, e.g. `admin.en`. - -When `` is a locale code and specified with the `--all` flag, all translations for that locale are pulled. - -By default this command will resolve to the translations resource directory via your `knock.json` file. When not set, it will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--translations-dir` flag. - -See the [Translation file structure](/cli/translation/file-structure) section for details on how translation files are organized. - -### Flags - - - - - - - - - -```bash title="Pull a single translation without a namespace" -knock translation pull en -``` - -```bash title="Pull a single translation with a namespace" -knock translation pull admin.en -``` - -```bash title="Pull all translations for a locale" -knock translation pull en --all -``` - -```bash title="Pull all translations" -knock translation pull --all -``` - -```bash title="Pull all translations as PO files" -knock translation pull --all --format=po -``` - -
- -
- - -Pushes local translation files back to Knock and upserts them. - -The `` is an identifier string that refers to a unique translation file. If a translation has no namespace, it is the same as the locale, e.g. `en`. If namespaced, it is formatted as `namespace.locale`, e.g. `admin.en`. - -When `` is a locale code and specified with the `--all` flag, all translations for that locale are pushed. - -By default this command will resolve to the translations resource directory via your `knock.json` file. When not set, it will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--translations-dir` flag. - -See the [Translation file structure](/cli/translation/file-structure) section for details on how translation files are organized. - -### Flags - - - - - - - - -```bash title="Push a single translation without a namespace" -knock translation push en -``` - -```bash title="Push a single translation with a namespace" -knock translation push tasks.en -``` - -```bash title="Push all translation files for the en locale" -knock translation push en --all -``` - -```bash title="Push all translation files" -knock translation push --all -``` - -
- -
- - -Validates one or more translation files. Useful for checking if the file is valid before running the `translation push` command. - -The `` is an identifier string that refers to a unique translation file. If a translation has no namespace, it is the same as the locale, e.g. `en`. If namespaced, it is formatted as `namespace.locale`, e.g. `admin.en`. - -Can only be validated against the `development` environment. - -### Flags - - - - - - - -```bash title="Basic usage" -knock translation validate --all -``` - -```bash title="Validate a single file" -knock translation validate admin.en -``` - -```bash title="Validate all translations for the en locale" -knock translation validate en --all -``` - -
- -
- - -When partials are pulled from Knock, they are stored in directories named by their partial key. Each partial directory contains a `partial.json` file that describes the partial's properties, and a content file based on the partial's type (HTML, markdown, plaintext, or JSON). - -{/* prettier-ignore */} - -{`partials/ -└── author-block/ - ├── content.html - └── partial.json`} - - -The content file name depends on the partial's type: - -- HTML partials: `content.html` -- Markdown partials: `content.md` -- Plaintext partials: `content.txt` -- JSON partials: `content.json` - -If you're migrating your local partial files into Knock, you can arrange them using the example file structure above and then push them into Knock with a single command using [`knock partial push --all`](/cli/partial/push). - - -
- -
- - -List all partials in the environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - - - - -```bash title="Basic usage" -knock partial list -``` - - -
- -
- - -Create a new partial with a minimal configuration. - -The command will create a new partial directory in your local file system. By default, this will be in the partials resource directory set by your `knock.json` file, or the current working directory if not configured. - -### Flags - - - - - - - - - - - - - - -```bash title="Create a partial interactively" -knock partial new -``` - -```bash title="Create an HTML partial" -knock partial new --key=my-partial --type=html -``` - -```bash title="Create and push a partial" -knock partial new --key=my-partial --type=markdown --push -``` - - -
- -
- - -You can show more details about a given partial with the `partial get` command, followed by the target partial key. - -Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - -```bash title="Get partial" -knock partial get my-partial -``` - -```bash title="Get partial in a different environment" -knock partial get my-partial --environment=production -``` - - -
- -
- - -You can pull and download partial files from Knock to a local file system with the `partial pull` command. Knock CLI will create a new partial directory or update the existing partial directory in the local file system. - -By default this command will resolve to the partial resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--partials-dir` flag. - -Note: if pulling the target partial for the first time (or all partials), Knock CLI will ask to confirm before writing to the local file system. - -See the [Partial file structure](/cli/partial/file-structure) section for details on how partial files are organized. - -### Flags - - - - - - - - - - - - - -```bash title="Pull a single partial" -knock partial pull my-partial -``` - -```bash title="Pull all partials" -knock partial pull --all -``` - - -
- -
- - -You can push and upload a partial directory to Knock with the `partial push` command. Knock will update an existing partial by the matching partial key, or create a new partial if it does not exist yet. - -By default this command will resolve to the partial resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--partials-dir` flag. - -Note: - -- The `partial push` command only pushes partials into the `development` environment. -- You must be directly above the target partial directory when running the `partial push` command, so the CLI can locate the `partial.json` file. -- You can also pass in the `--commit` flag (with an optional `--commit-message` flag) to commit the upserted changes immediately. - -### Flags - - - - - - - - - - - -```bash title="Push a single partial" -knock partial push my-partial -``` - -```bash title="Pushing a partial and committing with a message" -knock partial push my-partial \ - --commit \ - -m "Commit message" -``` - -```bash title="Pushing all partials from ./partials directory" -knock partial push --all --partials-dir=./partials -``` - - -
- -
- - -Validates one or more partial files. Useful for checking if the file is valid before running the `partial push` command. - -Can only be validated against the `development` environment. - -### Flags - - - - - - - -```bash title="Basic usage" -knock partial validate --all -``` - -```bash title="Validate a single partial" -knock partial validate my-partial -``` - -
- -
- - -List all commits in the environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - - - - - - -```bash title="Basic usage" -knock commit list -``` - -```bash title="List unpromoted commits in a different environment" -knock commit list --no-promoted --environment=staging -``` - -```bash title="List commits for a specific workflow" -knock commit list --resource-type=workflow --resource-id=new-commet -``` - - -
- -
- - -Shows the details of a given commit, using the `id` of the commit. - -### Flags - - - - - - - - -```bash title="Basic usage" -knock commit get 69cdde18-830a-42e0-ad4b-a230943bdc90 -``` - - -
- -
- - -You can commit all changes across all resources in the development environment with the commit command. - -### Flags - - - - - - - - - -```bash title="Basic usage" -knock commit -m "Commit message" -``` - - -
- -
- - -With the `commit promote` command, you can promote a single change to the subsequent environment, or promote all changes across all resources to a target environment from its directly preceding environment. - -Note: - -- For example, if you have three environments "development", "staging", and "production" (in that order), setting the `--to` flag to `production` will promote all new changes from the staging environment to the production environment. -- Promoting a single commit from staging using the `--only` flag will result in that commit being promoted to production. -- The `--to` environment must be a non-development environment. -- The `--to` and `--only` flags can't be used together. - -### Flags - - - - - - - - -```bash title="Promotes all changes" -knock commit promote --to=production -``` - -```bash title="Promotes one change" -knock commit promote --only=69cdde18-830a-42e0-ad4b-a230943bdc90 -``` - -
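For instance, with the three-environment setup described above, promoting a set of changes all the way to production takes two steps:

```bash title="Promoting through staging to production"
# Promote all new changes from development to staging
knock commit promote --to=staging

# Then promote those changes from staging to production
knock commit promote --to=production
```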
- -
- - -When guides are pulled from Knock, they are stored in directories named by their guide key. Each guide directory contains a `guide.json` file that describes the guide's configuration. - -{/* prettier-ignore */} - -{`guides/ -└── conference-banner/ - └── guide.json`} - - -If you're migrating your local guide files into Knock, you can arrange them using the example file structure above and then push them into Knock with a single command using [`knock guide push --all`](/cli/guide/push). - - -
- -
- - -You can see all your existing guides in a given environment with the guide list command. - -Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - - - - -```bash title="Basic usage" -knock guide list -``` - -```bash title="Pagination example" -knock guide list --after=xxx -``` - - -
- -
- - -Create a new guide with a minimal configuration. - -The command will create a new guide directory in your local file system. By default, this will be in the guides resource directory set by your `knock.json` file, or the current working directory if not configured. - -### Flags - - - - - - - - - - - - - - -```bash title="Create a guide interactively" -knock guide new -``` - -```bash title="Create a guide with a message type" -knock guide new --key=my-guide --message-type=banner -``` - -```bash title="Create and push a guide" -knock guide new --key=my-guide --message-type=modal --push -``` - - -
- -
- - -You can show more details about a given guide with the `guide get` command, followed by the target guide key. - -Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - -```bash title="Basic usage" -knock guide get my-guide -``` - -```bash title="Get guide in a different environment" -knock guide get my-guide --environment=production -``` - - -
- -
- - -You can pull and download guides from Knock to a local file system with the `guide pull` command. Knock CLI will create a new guide directory or update the existing guide directory in the local file system. - -By default this command will resolve to the guides resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--guides-dir` flag. - -Note: if pulling the target guide for the first time (or all guides), Knock CLI will ask to confirm before writing to the local file system. - -See the [Guide file structure](/cli/guide/file-structure) section for details on how guide files are organized. - -### Flags - - - - - - - - - - - - - -```bash title="Basic usage" -knock guide pull my-guide -``` - -```bash title="Pulling a guide in a different environment" -knock guide pull my-guide --environment=production -``` - -```bash title="Pulling all guides into ./guides directory" -knock guide pull --all --guides-dir=./guides -``` - - -
- -
- - -You can push and upload a guide directory to Knock with the `guide push` command. Knock will update an existing guide by the matching guide key, or create a new guide if it does not exist yet. - -By default this command will resolve to the guides resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--guides-dir` flag. - -Note: - -- The `guide push` command only pushes guides into the `development` environment. -- You must be directly above the target guide directory when running the `guide push` command, so the CLI can locate the `guide.json` file. -- You can also pass in the `--commit` flag (with an optional `--commit-message` flag) to commit the upserted changes immediately. - -See the [Guide file structure](/cli/guide/file-structure) section for details on how guide files are organized. - -### Flags - - - - - - - - - - - -```bash title="Basic usage" -knock guide push my-guide -``` - -```bash title="Pushing a guide and committing with a message" -knock guide push my-guide \ - --commit \ - -m "Commit message" -``` - -```bash title="Pushing all guides from ./guides directory" -knock guide push --all --guides-dir=./guides -``` - - -
- -
- - -You can validate a new or updated guide directory with the `guide validate` command. Knock will validate the given guide payload in the same way as it would with the `guide push` command, except without persisting those changes. - -Note: Validating a guide is only done against the `development` environment. - -### Flags - - - - - - - - - -```bash title="Basic usage" -knock guide validate my-guide -``` - - -
- -
- - -You can activate or deactivate a guide in a given environment with the `guide activate` command. You can either set the active status immediately or schedule it. - -### Flags - - - - - - - - - - - - -```bash title="Basic usage" -knock guide activate my-guide \ - --environment=development -``` - -```bash title="Deactivating a guide" -knock guide activate my-guide \ - --environment=development \ - --status=false -``` - - -
- -
- - -When message types are pulled from Knock, they are stored in directories named by their message type key. Each message type directory contains a `message_type.json` file that describes the message type's schema and configuration, and a `preview.html` file that contains the HTML preview template. - -{/* prettier-ignore */} - -{`message-types/ -└── alert-banner/ - ├── message_type.json - └── preview.html`} - - -If you're migrating your local message type files into Knock, you can arrange them using the example file structure above. - - -
- -
- - -Display all in-app message types for an environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - - - - -```bash title="Basic usage" -knock message-type list -``` - -```bash title="Pagination example" -knock message-type list --after=xxx -``` - - -
- -
- - -Create a new message type with a minimal configuration. - -The command will create a new message type directory in your local file system. By default, this will be in the message-types resource directory set by your `knock.json` file, or the current working directory if not configured. - -### Flags - - - - - - - - - - - - - -```bash title="Create a message type interactively" -knock message-type new -``` - -```bash title="Create a message type with a specific key and name" -knock message-type new --key=banner --name="Banner" -``` - -```bash title="Create and push a message type" -knock message-type new --key=my-message-type --push -``` - - -
- -
- - -Display a single in-app message type from an environment. - -Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. - -### Flags - - - - - - - - - - -```bash title="Basic usage" -knock message-type get my-message-type -``` - -```bash title="Get message type in a different environment" -knock message-type get my-message-type --environment=production -``` - - -
- -
- - -Pull one or more in-app message types from an environment into a local file system. Knock CLI will create a new message type directory or update the existing message type directory in the local file system. - -By default this command will resolve to the message types resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--message-types-dir` flag. - -Note: if pulling the target message type for the first time (or all message types), Knock CLI will ask to confirm before writing to the local file system. - -See the [Message type file structure](/cli/message-type/file-structure) section for details on how message type files are organized. - -### Flags - - - - - - - - - - - - - -```bash title="Basic usage" -knock message-type pull my-message-type -``` - -```bash title="Pulling a message type in a different environment" -knock message-type pull my-message-type --environment=production -``` - -```bash title="Pulling all message types into ./message-types directory" -knock message-type pull --all --message-types-dir=./message-types -``` - - -
- -
- - -Push one or more message types from a local file system to Knock. Knock will update an existing message type by the matching message type key, or create a new message type if it does not exist yet. - -By default this command will resolve to the message types resource directory via your `knock.json` file. When not set, will use the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--message-types-dir` flag. - -Note: - -- The `message-type push` command only pushes message types into the `development` environment. -- You must be directly above the target message type directory when running the `message-type push` command, so the CLI can locate the `message_type.json` file. -- You can also pass in the `--commit` flag (with an optional `--commit-message` flag) to commit the upserted changes immediately. - -See the [Message type file structure](/cli/message-type/file-structure) section for details on how message type files are organized. - -### Flags - - - - - - - - - - - -```bash title="Basic usage" -knock message-type push my-message-type -``` - -```bash title="Pushing a message type and committing with a message" -knock message-type push my-message-type \ - --commit \ - -m "Commit message" -``` - -```bash title="Pushing all message types from ./message-types directory" -knock message-type push --all --message-types-dir=./message-types -``` - - -
- -
- - -Validate one or more message types from a local file system. Knock will validate the given message type payload in the same way as it would with the `message-type push` command, except without persisting those changes. - -Note: Validating a message type is only done against the `development` environment. - -### Flags - - - - - - - - - -```bash title="Basic usage" -knock message-type validate my-message-type -``` - - -
diff --git a/content/cli/authentication.mdx b/content/cli/authentication.mdx new file mode 100644 index 000000000..e6b9afff5 --- /dev/null +++ b/content/cli/authentication.mdx @@ -0,0 +1,42 @@ +--- +title: Authentication commands +description: Commands for authenticating with the Knock CLI. +--- + +
+ + +The authentication commands enable you to log in and out of your Knock account via the CLI. + + +
+ +
+ + +You can log in to your Knock account with the `login` command. This will open a browser window where you can sign in to your Knock account and authorize the CLI to access your account. + + + + +```bash title="Log in to your Knock account" +knock login +``` + + +
+ +
+ + +You can log out of your Knock account with the `logout` command. This will clear the authentication token for the CLI. + + + + +```bash title="Log out of your Knock account" +knock logout +``` + + +
diff --git a/content/cli/branch.mdx b/content/cli/branch.mdx new file mode 100644 index 000000000..5c4a254c0 --- /dev/null +++ b/content/cli/branch.mdx @@ -0,0 +1,189 @@ +--- +title: Branches +description: Commands for managing branches in the Knock CLI. +--- + +
+ +

  
    Branches are currently in beta. If you'd like early access, or this is
    blocking your adoption of Knock, please{" "}
    get in touch.
  
  }
/>

[Branches](/version-control/branches) are a way to isolate changes to your Knock resources. They act as a sandbox for your changes, allowing you to modify your resources without affecting the main branch (your development environment) or other branches in your account.
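As a quick sketch of how the branch commands fit together (the branch slug and workflow key below are hypothetical; each command is documented in the sections that follow):

```bash title="A typical branch workflow (sketch)"
# Create the branch and make it the active branch for subsequent commands
knock branch switch my-feature --create

# While the branch is active, pushes apply to the branch instead of development
knock workflow push my-workflow

# Merge the branch back into the development environment
knock branch merge my-feature
```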
+ +
+ + +Lists all branches within your Knock account. You can paginate the results using the `--after` and `--before` flags. + +### Flags + + + + + + + + + +```bash title="List all branches" +knock branch list +``` + + +
+ +
+ + +Creates a new branch with the given slug. If the branch already exists, the command will return an error. + +### Flags + + + + + + + + +```bash title="Create a new branch" +knock branch create my-branch +``` + + +
+ +
+ + +Deletes the branch with the given slug. + +Deleting a branch is a permanent operation and cannot be undone.} +/> + +### Flags + + + + + + + + +```bash title="Delete a branch" +knock branch delete my-branch +``` + + +
+ +
- - -Switches to an existing branch with the given slug.

Switching to a branch updates the `.knockbranch` file in your directory; the switch persists until you exit the branch with the `knock branch exit` command.

### Flags


```bash title="Switch to a branch"
knock branch switch my-branch
```

```bash title="Switch to a branch and create it if it doesn't exist"
knock branch switch my-branch --create
```
+ +
- - -Exits the current branch by updating the `.knockbranch` file in your directory to point back to the main branch.


```bash title="Exit a branch"
knock branch exit
```
+ +
+ + +Merges a branch into the development environment. By default, the branch will be deleted after merging. + +### Flags + + + + + + + + + +```bash title="Merge a branch" +knock branch merge my-branch +``` + +```bash title="Merge a branch without deleting it" +knock branch merge my-branch --no-delete +``` + + +
diff --git a/content/cli/channel.mdx b/content/cli/channel.mdx new file mode 100644 index 000000000..364ff1ec2 --- /dev/null +++ b/content/cli/channel.mdx @@ -0,0 +1,48 @@ +--- +title: Channels +description: Commands for managing channels in the Knock CLI. +--- + +
+ + +Channel commands enable you to view and manage channels in your Knock account. + + +
+ +
+ + +Lists all channels in your Knock account. You can paginate the results using the `--after` and `--before` flags. + +### Flags + + + + + + + + + + + +```bash title="List all channels" +knock channel list +``` + + +
diff --git a/content/cli/commit.mdx b/content/cli/commit.mdx new file mode 100644 index 000000000..338ec126f --- /dev/null +++ b/content/cli/commit.mdx @@ -0,0 +1,173 @@ +--- +title: Commits +description: Commands for managing commits in the Knock CLI. +--- + +
+ + +Commit commands enable you to manage commits in your Knock account from the CLI. + + +
+ +
- - -List all commits in the environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment.

### Flags


```bash title="Basic usage"
knock commit list
```

```bash title="List unpromoted commits in a different environment"
knock commit list --no-promoted --environment=staging
```

```bash title="List commits for a specific workflow"
knock commit list --resource-type=workflow --resource-id=new-comment
```
+ +
+ + +Shows the details of a given commit, using the `id` of the commit. + +### Flags + + + + + + + + +```bash title="Basic usage" +knock commit get 69cdde18-830a-42e0-ad4b-a230943bdc90 +``` + + +
+ +
+ + +You can commit all changes across all resources in the development environment with the commit command. + +### Flags + + + + + + + + + +```bash title="Basic usage" +knock commit -m "Commit message" +``` + + +
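If you prefer to batch several changes into one commit, you can push multiple resources without the `--commit` flag and then commit them together. A sketch (the resource keys are illustrative):

```bash title="Pushing several changes, then committing once (sketch)"
knock workflow push my-workflow
knock layout push my-layout
knock commit -m "Update workflow copy and email layout"
```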
+ +
- - -You can promote a single change to the next environment, or promote all changes across all resources into a target environment from its directly preceding environment, using the `commit promote` command.

Note:

- For example, if you have three environments "development", "staging", and "production" (in that order), setting the `--to` flag to `production` will promote all new changes from the staging environment to the production environment.
- Promoting a single commit from staging using the `--only` flag will result in that commit being promoted to production.
- The `--to` environment must be a non-development environment.
- The `--to` and `--only` flags can't be used together.

### Flags


```bash title="Promotes all changes"
knock commit promote --to=production
```

```bash title="Promotes one change"
knock commit promote --only=69cdde18-830a-42e0-ad4b-a230943bdc90
```
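For instance, a CI job that promotes everything pending into production could run the command below. The `KNOCK_SERVICE_TOKEN` variable name is illustrative; supply the token however your CI provider exposes secrets.

```bash title="Promoting changes from CI (sketch)"
knock commit promote --to=production --service-token="$KNOCK_SERVICE_TOKEN"
```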
diff --git a/content/cli/email-layout.mdx b/content/cli/email-layout.mdx new file mode 100644 index 000000000..a8f75c5ce --- /dev/null +++ b/content/cli/email-layout.mdx @@ -0,0 +1,320 @@ +--- +title: Email layouts +description: Commands for managing email layouts in the Knock CLI. +--- + +
+ + +Email layout commands enable you to manage email layouts in your Knock account from the CLI. + + +
+ +
+ + +When email layouts are pulled from Knock, they are stored in directories named by their layout key. + +{/* prettier-ignore */} + +{`layouts/ +├── default/ +│ ├── html_layout.html +│ ├── layout.json +│ └── text_layout.txt +└── custom-layout/ + ├── html_layout.html + ├── layout.json + └── text_layout.txt`} + + +If you're migrating your local layout files into Knock, you can arrange them using the example file structure above and then push them into Knock with a single command using [`knock layout push --all`](/cli/email-layout/push). Each `layout.json` file should follow the example shown below; additional information on the Layout structure is defined [here](/mapi-reference/email_layouts/schemas/email_layout). + +```json title="Local layout file example JSON" +{ + "key": "custom-layout", + "name": "Custom Layout", + "html_layout@": "html_layout.html", + "text_layout@": "text_layout.txt", + "footer_links": [{ "text": "My link", "url": "https://example.com" }] +} +``` + + +
+ +
+ + +List all email layouts in the environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + + + + +```bash title="Basic usage" +knock layout list +``` + + +
+ +
+ + +Fetches a single email layout, using the `key` of the email layout. + +Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + +```bash title="Get layout" +knock layout get default +``` + +```bash title="Get layout in a different environment" +knock layout get default --environment=production +``` + + +
+ +
+ + +Create a new email layout with a minimal configuration. + +The command will create a new layout directory in your local file system. By default, this will be in the layouts resource directory set by your `knock.json` file, or the current working directory if not configured. + +### Flags + + + + + + + + + + + + + +```bash title="Create a layout interactively" +knock layout new +``` + +```bash title="Create a layout with a specific key and name" +knock layout new --key=my-layout --name="My Layout" +``` + +```bash title="Create and push a layout" +knock layout new --key=my-layout --push +``` + + +
+ +
- - -Pulls the contents of one or all email layouts from Knock into your local file system. You can pull a single email layout by specifying its key, or use the `--all` flag to pull all email layouts from Knock at once.

By default, this command will resolve to the email layouts resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--email-layouts-dir` flag.

See the [Layout file structure](/cli/email-layout/file-structure) section for details on how layout files are organized.

### Flags


```bash title="Pull a single email layout"
knock layout pull default
```

```bash title="Pull all email layouts"
knock layout pull --all
```
+ +
- - -Pushes local email layouts back to Knock and upserts them. You can push a single email layout by specifying its key, or use the `--all` flag to push all email layouts to Knock at once.

By default, this command will resolve to the email layouts resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--email-layouts-dir` flag.

See the [Layout file structure](/cli/email-layout/file-structure) section for details on how layout files are organized.

### Flags


```bash title="Push a single email layout"
knock layout push my-layout
```

```bash title="Push all email layouts"
knock layout push --all
```
+ +
- - -Validates one or more email layouts. Useful for checking that a layout is valid before running the `layout push` command.

You can provide a layout key to validate a single email layout, or use the `--all` flag to validate all email layouts.

Email layouts can only be validated against the `development` environment.

### Flags


```bash title="Basic usage"
knock layout validate --all
```

```bash title="Validate a single email layout"
knock layout validate default
```
diff --git a/content/cli/environment.mdx b/content/cli/environment.mdx new file mode 100644 index 000000000..7a8eb5e66 --- /dev/null +++ b/content/cli/environment.mdx @@ -0,0 +1,48 @@ +--- +title: Environments +description: Commands for managing environments in the Knock CLI. +--- + +
+ + +Environment commands enable you to view and manage environments in your Knock account. + + +
+ +
+ + +Lists all environments in your Knock account. You can paginate the results using the `--after` and `--before` flags. + +### Flags + + + + + + + + + + + +```bash title="List all environments" +knock environment list +``` + + +
diff --git a/content/cli/guide.mdx b/content/cli/guide.mdx new file mode 100644 index 000000000..66f169472 --- /dev/null +++ b/content/cli/guide.mdx @@ -0,0 +1,390 @@ +--- +title: Guides +description: Commands for managing guides in the Knock CLI. +--- + +
+ + +Guide commands enable you to manage guides in your Knock account from the CLI. + + +
+ +
+ + +When guides are pulled from Knock, they are stored in directories named by their guide key. Each guide directory contains a `guide.json` file that describes the guide's configuration. + +{/* prettier-ignore */} + +{`guides/ +└── conference-banner/ + └── guide.json`} + + +If you're migrating your local guide files into Knock, you can arrange them using the example file structure above and then push them into Knock with a single command using [`knock guide push --all`](/cli/guide/push). + + +
+ +
+ + +You can see all your existing guides in a given environment with the guide list command. + +Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + + + + +```bash title="Basic usage" +knock guide list +``` + +```bash title="Pagination example" +knock guide list --after=xxx +``` + + +
+ +
+ + +Create a new guide with a minimal configuration. + +The command will create a new guide directory in your local file system. By default, this will be in the guides resource directory set by your `knock.json` file, or the current working directory if not configured. + +### Flags + + + + + + + + + + + + + + +```bash title="Create a guide interactively" +knock guide new +``` + +```bash title="Create a guide with a message type" +knock guide new --key=my-guide --message-type=banner +``` + +```bash title="Create and push a guide" +knock guide new --key=my-guide --message-type=modal --push +``` + + +
+ +
+ + +You can show more details about a given guide with the `guide get` command, followed by the target guide key. + +Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + +```bash title="Basic usage" +knock guide get my-guide +``` + +```bash title="Get guide in a different environment" +knock guide get my-guide --environment=production +``` + + +
+ +
- - -You can pull and download guides from Knock to a local file system with the `guide pull` command. Knock CLI will create a new guide directory or update the existing guide directory in the local file system.

By default, this command will resolve to the guides resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--guides-dir` flag.

Note: if pulling the target guide for the first time (or all guides), Knock CLI will ask to confirm before writing to the local file system.

See the [Guide file structure](/cli/guide/file-structure) section for details on how guide files are organized.

### Flags


```bash title="Basic usage"
knock guide pull my-guide
```

```bash title="Pulling a guide in a different environment"
knock guide pull my-guide --environment=production
```

```bash title="Pulling all guides into ./guides directory"
knock guide pull --all --guides-dir=./guides
```
+ +
- - -You can push and upload a guide directory to Knock with the `guide push` command. Knock will update an existing guide by the matching guide key, or create a new guide if it does not exist yet.

By default, this command will resolve to the guides resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--guides-dir` flag.

Note:

- The `guide push` command only pushes guides into the `development` environment.
- You must be directly above the target guide directory when running the `guide push` command, so the CLI can locate the `guide.json` file.
- You can also pass in the `--commit` flag (with an optional `--commit-message` flag) to commit the upserted changes immediately.

See the [Guide file structure](/cli/guide/file-structure) section for details on how guide files are organized.

### Flags


```bash title="Basic usage"
knock guide push my-guide
```

```bash title="Pushing a guide and committing with a message"
knock guide push my-guide \
  --commit \
  -m "Commit message"
```

```bash title="Pushing all guides from ./guides directory"
knock guide push --all --guides-dir=./guides
```
+ +
+ + +You can validate a new or updated guide directory with the `guide validate` command. Knock will validate the given guide payload in the same way as it would with the `guide push` command, except without persisting those changes. + +Note: Validating a guide is only done against the `development` environment. + +### Flags + + + + + + + + + +```bash title="Basic usage" +knock guide validate my-guide +``` + + +
+ +
+ + +You can activate or deactivate a guide in a given environment with the `guide activate` command. You can either set the active status immediately or schedule it. + +### Flags + + + + + + + + + + + + +```bash title="Basic usage" +knock guide activate my-guide \ + --environment=development +``` + +```bash title="Deactivating a guide" +knock guide activate my-guide \ + --environment=development \ + --status=false +``` + + +
diff --git a/content/cli/message-type.mdx b/content/cli/message-type.mdx new file mode 100644 index 000000000..ff003332e --- /dev/null +++ b/content/cli/message-type.mdx @@ -0,0 +1,331 @@ +--- +title: Message types +description: Commands for managing message types in the Knock CLI. +--- + +
+ + +Message type commands enable you to manage in-app message types in your Knock account from the CLI. + + +
+ +
+ + +When message types are pulled from Knock, they are stored in directories named by their message type key. Each message type directory contains a `message_type.json` file that describes the message type's schema and configuration, and a `preview.html` file that contains the HTML preview template. + +{/* prettier-ignore */} + +{`message-types/ +└── alert-banner/ + ├── message_type.json + └── preview.html`} + + +If you're migrating your local message type files into Knock, you can arrange them using the example file structure above. + + +
+ +
+ + +Display all in-app message types for an environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + + + + +```bash title="Basic usage" +knock message-type list +``` + +```bash title="Pagination example" +knock message-type list --after=xxx +``` + + +
+ +
+ + +Create a new message type with a minimal configuration. + +The command will create a new message type directory in your local file system. By default, this will be in the message-types resource directory set by your `knock.json` file, or the current working directory if not configured. + +### Flags + + + + + + + + + + + + + +```bash title="Create a message type interactively" +knock message-type new +``` + +```bash title="Create a message type with a specific key and name" +knock message-type new --key=banner --name="Banner" +``` + +```bash title="Create and push a message type" +knock message-type new --key=my-message-type --push +``` + + +
+ +
+ + +Display a single in-app message type from an environment. + +Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + +```bash title="Basic usage" +knock message-type get my-message-type +``` + +```bash title="Get message type in a different environment" +knock message-type get my-message-type --environment=production +``` + + +
+ +
- - -Pull one or more in-app message types from an environment into a local file system. Knock CLI will create a new message type directory or update the existing message type directory in the local file system.

By default, this command will resolve to the message types resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--message-types-dir` flag.

Note: if pulling the target message type for the first time (or all message types), Knock CLI will ask to confirm before writing to the local file system.

See the [Message type file structure](/cli/message-type/file-structure) section for details on how message type files are organized.

### Flags


```bash title="Basic usage"
knock message-type pull my-message-type
```

```bash title="Pulling a message type in a different environment"
knock message-type pull my-message-type --environment=production
```

```bash title="Pulling all message types into ./message-types directory"
knock message-type pull --all --message-types-dir=./message-types
```
+ +
- - -Push one or more message types from a local file system to Knock. Knock will update an existing message type by the matching message type key, or create a new message type if it does not exist yet.

By default, this command will resolve to the message types resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--message-types-dir` flag.

Note:

- The `message-type push` command only pushes message types into the `development` environment.
- You must be directly above the target message type directory when running the `message-type push` command, so the CLI can locate the `message_type.json` file.
- You can also pass in the `--commit` flag (with an optional `--commit-message` flag) to commit the upserted changes immediately.

See the [Message type file structure](/cli/message-type/file-structure) section for details on how message type files are organized.

### Flags


```bash title="Basic usage"
knock message-type push my-message-type
```

```bash title="Pushing a message type and committing with a message"
knock message-type push my-message-type \
  --commit \
  -m "Commit message"
```

```bash title="Pushing all message types from ./message-types directory"
knock message-type push --all --message-types-dir=./message-types
```
+ +
+ + +Validate one or more message types from a local file system. Knock will validate the given message type payload in the same way as it would with the `message-type push` command, except without persisting those changes. + +Note: Validating a message type is only done against the `development` environment. + +### Flags + + + + + + + + + +```bash title="Basic usage" +knock message-type validate my-message-type +``` + + +
diff --git a/content/cli/overview.mdx b/content/cli/overview.mdx new file mode 100644 index 000000000..ede884d13 --- /dev/null +++ b/content/cli/overview.mdx @@ -0,0 +1,179 @@ +--- +title: CLI reference +description: Learn more about the commands and flags available in the Knock CLI. +tags: ["cli", "command line", "cmd", "command-line", "terminal"] +--- + +
+ + +This reference documents every command and flag available in Knock's command-line interface. + +The Knock CLI helps you work with your Knock resources right from the terminal. + +With the CLI, you can: + +- Work with your Knock workflows and notification templates locally. +- Integrate Knock into your CI/CD environment to automatically promote changes. +- Map your translation files into Knock to localize your notifications. + + +
+ +
- - -**Install with Homebrew**

For macOS, you can install the Knock CLI using [Homebrew](https://brew.sh/). Once the CLI is installed, you can call it by using the `knock` command in your terminal.

**Install with npm**

For other operating systems, you can install the Knock CLI using `npm`, the Node.js package manager. Once the CLI is installed, you can call it by using the `knock` command in your terminal.

**Requirements**

The Knock CLI is built with Node.js and installable as an `npm` package. You must have `node` and `npm` installed already, with the following versions:

- Node.js: 16.14.0 or higher
- npm: 7.18.1 or higher

You can find the Knock CLI npm package [here](https://www.npmjs.com/package/@knocklabs/cli).


```bash title="Install the Knock CLI with Homebrew"
brew install knocklabs/tap/knock
```

```bash title="Install the Knock CLI with npm"
npm install -g @knocklabs/cli
```
+ +
- - -**Using your Knock account**

You can authenticate the CLI with your Knock account by running `knock login`. This will open a browser window where you can sign in to your Knock account and authorize the CLI to access your account.

Once authenticated, you can verify it works by running `knock whoami`. If your account is valid and configured properly, you'll receive a 200 response that shows the account name and your user ID.


  
    Using your Knock account with the CLI inherits the permissions of the
    user that is logged in on the account you authorized.
  
  }
/>

If you need to switch between accounts, you can run `knock logout` to log out of your current account and log in to a different one.

**Using a service token**

If you need to authenticate in a remote environment, or want complete control, you can generate a [service token](/developer-tools/service-tokens) in the Knock dashboard. You can specify a service token in all CLI calls, or you can optionally use a configuration file to authenticate all requests.

Once you have generated a service token, you can verify it works by running `knock whoami --service-token=YOUR_SERVICE_TOKEN`. If your token is valid and configured properly, you'll receive a 200 response that shows the account name and the service token name.

**Setting up a configuration file (optional)**

A service token is required by the CLI for most commands. For convenience, the Knock CLI supports a user configuration file, where you can store the service token for the CLI to read automatically rather than having to pass it in manually with the `--service-token` flag for every command.

To set up a user configuration file, create a `config.json` file in the Knock CLI's config directory at `~/.config/knock` (macOS/Unix) or `%LOCALAPPDATA%\knock` (Windows), and add the following JSON:

```json title="config.json"
{
  "serviceToken": "YOUR_SERVICE_TOKEN"
}
```

When the Knock CLI detects a user configuration file, it will use the service token provided in it automatically.


```bash title="Log in to your Knock account"
knock login
```

```bash title="Verify your service token"
knock whoami --service-token=XXX
```
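On macOS or Linux, one way to create that configuration file from the shell (substituting your real service token) is:

```bash title="Creating the CLI config file (macOS/Linux)"
mkdir -p ~/.config/knock
cat > ~/.config/knock/config.json <<'EOF'
{
  "serviceToken": "YOUR_SERVICE_TOKEN"
}
EOF

# The CLI should now pick up the token automatically
knock whoami
```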
+ +
+ + +The following flags are supported for every command. + +**Flags** + + + + + + +
+ +
+ + You can configure your Knock project by creating a `knock.json` file or by using the `knock init` command to generate one for you. This + file is a project-level configuration file that tells the Knock CLI where to + find your Knock resources. + + For example, if you want to store your Knock resources in the `.knock/` directory, you can create a `knock.json` file with the following content: + +{/* prettier-ignore */} +```json title="Example knock.json file" +{ + "knockDir": ".knock/" +} +``` + + Once you have created the `knock.json` file, all subsequent `knock pull` and `knock push` commands will use the `.knock/` directory as the default target directory relative to the location of the `knock.json` file, regardless of the directory you are currently in. + + If you need to specify a different target directory for a single command, you can use the `--knock-dir` flag, or the `--{resource-type}-dir` flag for specific resource types. + + +
+ +
+ + +There is no required directory structure when working with Knock resources locally. However, if you use the `knock pull` or `knock push` commands, they will produce and expect the directory structure outlined below. + +For forward compatibility, we recommend using this structure to ensure your local files work seamlessly with future CLI updates. + +When you use `knock pull`, resources will be grouped by resource type within subdirectories. The following directory structure will be created: + +{/* prettier-ignore */} + +{`./knock/ +├── guides/ +├── layouts/ +├── message-types/ +├── partials/ +├── translations/ +└── workflows/`} + + +Each resource type has its own directory structure, which is described in detail in the sections below for each resource type. + + +
diff --git a/content/cli/partial.mdx b/content/cli/partial.mdx new file mode 100644 index 000000000..b319fd75f --- /dev/null +++ b/content/cli/partial.mdx @@ -0,0 +1,337 @@ +--- +title: Partials +description: Commands for managing partials in the Knock CLI. +--- + +
+ + +Partial commands enable you to manage partials in your Knock account from the CLI. + + +
+ +
+ + +When partials are pulled from Knock, they are stored in directories named by their partial key. Each partial directory contains a `partial.json` file that describes the partial's properties, and a content file based on the partial's type (HTML, markdown, plaintext, or JSON). + +{/* prettier-ignore */} + +{`partials/ +└── author-block/ + ├── content.html + └── partial.json`} + + +The content file name depends on the partial's type: + +- HTML partials: `content.html` +- Markdown partials: `content.md` +- Plaintext partials: `content.txt` +- JSON partials: `content.json` + +If you're migrating your local partial files into Knock, you can arrange them using the example file structure above and then push them into Knock with a single command using [`knock partial push --all`](/cli/partial/push). + + +
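As a rough sketch only — the field names below are assumptions modeled on the `@` file-reference convention shown for email layouts, not a documented schema, so check the partial schema in the management API reference for the authoritative shape — a `partial.json` alongside `content.html` might look like:

```json title="Hypothetical partials/author-block/partial.json (field names are assumptions)"
{
  "name": "Author block",
  "description": "Reusable author byline used across email templates",
  "content@": "content.html"
}
```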
+ +
+ + +List all partials in the environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + + + + +```bash title="Basic usage" +knock partial list +``` + + +
+ +
+ + +Create a new partial with a minimal configuration. + +The command will create a new partial directory in your local file system. By default, this will be in the partials resource directory set by your `knock.json` file, or the current working directory if not configured. + +### Flags + + + + + + + + + + + + + + +```bash title="Create a partial interactively" +knock partial new +``` + +```bash title="Create an HTML partial" +knock partial new --key=my-partial --type=html +``` + +```bash title="Create and push a partial" +knock partial new --key=my-partial --type=markdown --push +``` + + +
+ +
+ + +You can show more details about a given partial with the `partial get` command, followed by the target partial key. + +Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + +```bash title="Get partial" +knock partial get my-partial +``` + +```bash title="Get partial in a different environment" +knock partial get my-partial --environment=production +``` + + +
+ +
- - -You can pull and download partial files from Knock to a local file system with the `partial pull` command. Knock CLI will create a new partial directory or update the existing partial directory in the local file system.

By default, this command will resolve to the partials resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--partials-dir` flag.

Note: if pulling the target partial for the first time (or all partials), Knock CLI will ask to confirm before writing to the local file system.

See the [Partial file structure](/cli/partial/file-structure) section for details on how partial files are organized.

### Flags


```bash title="Pull a single partial"
knock partial pull my-partial
```

```bash title="Pull all partials"
knock partial pull --all
```
+ +
- - -You can push and upload a partial directory to Knock with the `partial push` command. Knock will update an existing partial by the matching partial key, or create a new partial if it does not exist yet.

By default, this command will resolve to the partials resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--partials-dir` flag.

Note:

- The `partial push` command only pushes partials into the `development` environment.
- You must be directly above the target partial directory when running the `partial push` command, so the CLI can locate the `partial.json` file.
- You can also pass in the `--commit` flag (with an optional `--commit-message` flag) to commit the upserted changes immediately.

### Flags


```bash title="Push a single partial"
knock partial push my-partial
```

```bash title="Pushing a partial and committing with a message"
knock partial push my-partial \
  --commit \
  -m "Commit message"
```

```bash title="Pushing all partials from ./partials directory"
knock partial push --all --partials-dir=./partials
```
+ +
- - -Validates one or more partial files. Useful for checking that a file is valid before running the `partial push` command.

Partials can only be validated against the `development` environment.

### Flags


```bash title="Basic usage"
knock partial validate --all
```

```bash title="Validate a single partial"
knock partial validate my-partial
```
diff --git a/content/cli/resources.mdx b/content/cli/resources.mdx new file mode 100644 index 000000000..b6e5e0cc5 --- /dev/null +++ b/content/cli/resources.mdx @@ -0,0 +1,121 @@ +--- +title: Managing resources +description: Commands for managing all Knock resources at once. +--- + +
+ + +These commands enable you to manage all Knock resources (workflows, partials, email layouts, translations, guides, and message-types) at once. + + +
+ +
+ + +Initializes a new Knock project by creating a `knock.json` file in the current working directory. + +### Flags + + + + + + + + +```bash title="Initialize a new Knock project" +knock init +``` + + +
+ +
+ + +Pulls the contents of all Knock resources (workflows, partials, email layouts, translations, guides, and message-types) from Knock into your local file system. + +Resources will be grouped by resource type within subdirectories of the target directory path set either by your `knock.json` file or by the `--knock-dir` flag. See the [Directory structure](/cli/overview/directory-structure) section for details on the directory structure used by `push` and `pull` commands. + +### Flags + + + + + + + + + + + + +```bash title="Pull all resources" +knock pull --knock-dir=./knock +``` + + +
+ +
- - -Pushes all local resource files (workflows, partials, email layouts, and translations) back to Knock and upserts them.

Resources will be read from the target directory path set either by your `knock.json` file or by the `--knock-dir` flag. See the [Directory structure](/cli/overview/directory-structure) section for details on the directory structure used by `push` and `pull` commands.

### Flags


```bash title="Push all resources"
knock push --knock-dir=./knock
```
diff --git a/content/cli/translation.mdx b/content/cli/translation.mdx new file mode 100644 index 000000000..a3a975aa2 --- /dev/null +++ b/content/cli/translation.mdx @@ -0,0 +1,298 @@ +--- +title: Translations +description: Commands for managing translations in the Knock CLI. +--- + +
+ + +Translation commands enable you to manage translations in your Knock account from the CLI. + + +
+ +
- - -When translations are pulled from Knock, they are stored in directories named by their locale codes, and each file is named after its locale code. Any namespaced translations prepend the namespace to the filename, with `.` used as a separator.

{/* prettier-ignore */}

{`translations/
├── en/
│   ├── en.json
│   └── admin.en.json
└── en-GB/
    ├── en-GB.json
    └── tasks.en-GB.json`}


If you're migrating your local translation files into Knock, you can arrange them using the file structure above and then push them into Knock with a single command using [`knock translation push --all`](/cli/translation/push). Each locale file (such as `en.json`) or namespaced file (such as `admin.en.json`) should follow the structure defined [here](/mapi-reference/translations/schemas/translation).
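For illustration, a translation file is a JSON object of translation keys and strings; the keys and values below are hypothetical, and the schema linked above is the authoritative reference.

```json title="Example translations/en/en.json (illustrative)"
{
  "welcome": "Welcome to the app!",
  "comments": {
    "new": "You have a new comment."
  }
}
```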
+ +
+ + +List all translations in the environment. Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + + + + +```bash title="Basic usage" +knock translation list +``` + + +
+ +
- - -You can show the content of a given translation with the `translation get` command, followed by the target translation ref.

The translation ref is an identifier string that refers to a unique translation file. If a translation has no namespace, the ref is the same as the locale, e.g., `en`. If namespaced, it is formatted as `namespace.locale`, e.g., `admin.en`.

Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment.

### Flags


```bash title="Get translation without a namespace"
knock translation get en
```

```bash title="Get translation with a namespace"
knock translation get admin.en
```

```bash title="Get translation in a different environment"
knock translation get en --environment=production
```
+ +
- - -You can pull and download translation files from Knock to a local file system with the `translation pull` command. Knock CLI will create a new translation file or update the existing file in the local file system.

The translation ref is an identifier string that refers to a unique translation file. If a translation has no namespace, the ref is the same as the locale, e.g., `en`. If namespaced, it is formatted as `namespace.locale`, e.g., `admin.en`.

When the translation ref is a locale code and is combined with the `--all` flag, all translations for that locale are pulled.

By default, this command will resolve to the translations resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--translations-dir` flag.

See the [Translation file structure](/cli/translation/file-structure) section for details on how translation files are organized.

### Flags


```bash title="Pull a single translation without a namespace"
knock translation pull en
```

```bash title="Pull a single translation with a namespace"
knock translation pull admin.en
```

```bash title="Pull all translations for a locale"
knock translation pull en --all
```

```bash title="Pull all translations"
knock translation pull --all
```

```bash title="Pull all translations as PO files"
knock translation pull --all --format=po
```
+ +
- - -Pushes local translation files back to Knock and upserts them.

The translation ref is an identifier string that refers to a unique translation file. If a translation has no namespace, the ref is the same as the locale, e.g., `en`. If namespaced, it is formatted as `namespace.locale`, e.g., `admin.en`.

When the translation ref is a locale code and is combined with the `--all` flag, all translations for that locale are pushed.

By default, this command will resolve to the translations resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--translations-dir` flag.

See the [Translation file structure](/cli/translation/file-structure) section for details on how translation files are organized.

### Flags


```bash title="Push a single translation without a namespace"
knock translation push en
```

```bash title="Push a single translation with a namespace"
knock translation push tasks.en
```

```bash title="Push all translation files for the en locale"
knock translation push en --all
```

```bash title="Push all translation files"
knock translation push --all
```
+ +
- - -Validates one or more translation files. Useful for checking that a file is valid before running the `translation push` command.

The translation ref is an identifier string that refers to a unique translation file. If a translation has no namespace, the ref is the same as the locale, e.g., `en`. If namespaced, it is formatted as `namespace.locale`, e.g., `admin.en`.

Translations can only be validated against the `development` environment.

### Flags


```bash title="Basic usage"
knock translation validate --all
```

```bash title="Validate a single file"
knock translation validate admin.en
```

```bash title="Validate all translations for the en locale"
knock translation validate en --all
```
diff --git a/content/cli/workflow.mdx b/content/cli/workflow.mdx new file mode 100644 index 000000000..98cd36a94 --- /dev/null +++ b/content/cli/workflow.mdx @@ -0,0 +1,513 @@ +--- +title: Workflows +description: Commands for managing workflows in the Knock CLI. +--- + +
+ + +Workflow commands enable you to manage workflows in your Knock account from the CLI. + + +
+ +
+ + +When workflows are pulled from Knock, they are stored in directories named by their workflow key. In addition to a `workflow.json` file that describes all of a given workflow's steps, each workflow directory also contains individual folders for each of the [channel steps](/designing-workflows/channel-step) in the workflow that hold additional content and formatting data. + +{/* prettier-ignore */} + +{`workflows/ +└── my-workflow/ + ├── email_1/ + │ ├── visual_blocks/ + │ │ └── 1.content.md + │ └── visual_blocks.json + ├── in_app_feed_1/ + │ └── markdown_body.md + └── workflow.json`} + + +If you're migrating your local workflow files into Knock, you can arrange them using the example file structure above and then push them into Knock with a single command using [`knock workflow push --all`](/cli/workflow/push). Each `workflow.json` file should follow the structure defined [here](/mapi-reference/workflows/schemas/workflow). + + +
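For example, to update the in-app step's copy from the tree above and sync it back to Knock, a local editing loop might look like the following sketch (paths follow the example structure; each command is documented in the sections below):

```bash title="Editing a pulled workflow locally (sketch)"
# Edit the markdown body of the in-app feed step
$EDITOR workflows/my-workflow/in_app_feed_1/markdown_body.md

# Validate, then push from directly above the workflow directory
cd workflows
knock workflow validate my-workflow
knock workflow push my-workflow --commit -m "Update in-app copy"
```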
+ +
+ + +You can see all your existing workflows in a given environment with the workflow list command. + +Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + + + + +```bash title="Basic usage" +knock workflow list +``` + +```bash title="Pagination example" +knock workflow list --after=xxx +``` + + +
+ +
+ + +You can show more details about a given workflow with the `workflow get` command, followed by the target workflow key. + +Use an `--environment` flag to specify the target environment; if omitted, the Knock CLI defaults to the development environment. + +### Flags + + + + + + + + + + +```bash title="Basic usage" +knock workflow get my-workflow +``` + +```bash title="Get workflow in a different environment" +knock workflow get my-workflow --environment=production +``` + + +
+ +
+ + +Create a new workflow with a minimal configuration. You can either select steps interactively or use a template to scaffold the workflow. + +The command will create a new workflow directory in your local file system. By default, this will be in the workflows resource directory set by your `knock.json` file, or the current working directory if not configured. + +### Flags + + + + + + + + + + + + + + + +```bash title="Create a workflow interactively" +knock workflow new +``` + +```bash title="Create a workflow with a specific key" +knock workflow new --key=my-workflow +``` + +```bash title="Create a workflow with specific steps" +knock workflow new --key=my-workflow --steps=email,sms,in_app_feed +``` + +```bash title="Create a workflow from a template" +knock workflow new --key=my-workflow --template=workflows/digest-email +``` + +```bash title="Create and push a workflow" +knock workflow new --key=my-workflow --steps=email --push +``` + + +
+ +
- - -You can pull and download workflows, along with their message templates, from Knock to a local file system with the `workflow pull` command. Knock CLI will create a new workflow directory or update the existing workflow directory in the local file system.

By default, this command will resolve to the workflows resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--workflows-dir` flag.

Note: if pulling the target workflow for the first time (or all workflows), Knock CLI will ask to confirm before writing to the local file system.

See the [Workflow file structure](/cli/workflow/file-structure) section for details on how workflow files are organized.

### Flags


```bash title="Basic usage"
knock workflow pull my-workflow
```

```bash title="Pulling a workflow in a different environment"
knock workflow pull my-workflow --environment=production
```

```bash title="Pulling all workflows into ./workflows directory"
knock workflow pull --all --workflows-dir=./workflows
```
+ +
- - -You can push and upload a workflow directory to Knock with the `workflow push` command. Knock will update an existing workflow by the matching workflow key, or create a new workflow if it does not exist yet.

By default, this command will resolve to the workflows resource directory via your `knock.json` file. When not set, the CLI uses the current working directory as the default. In the case of the `--all` flag, the target directory path will be resolved via your `knock.json` file or the `--workflows-dir` flag.

Note:

- The `workflow push` command only pushes workflows into the `development` environment.
- You must be directly above the target workflow directory when running the `workflow push` command, so the CLI can locate the `workflow.json` file.
- You can also pass in the `--commit` flag (with an optional `--commit-message` flag) to commit the upserted changes immediately.

See the [Workflow file structure](/cli/workflow/file-structure) section for details on how workflow files are organized.

### Flags


```bash title="Basic usage"
knock workflow push my-workflow
```

```bash title="Pushing a workflow and committing with a message"
knock workflow push my-workflow \
  --commit \
  -m "Commit message"
```

```bash title="Pushing all workflows from ./workflows directory"
knock workflow push --all --workflows-dir=./workflows
```
+ +
+ + +You can run a workflow with the `workflow run` command. Knock will execute a run for the latest saved version of the workflow it finds with the given key and parameters you send it. + +Note: + +- Changes to the local version of the workflow in your file system will not be reflected in a workflow run; it will use the current version that is stored in Knock. + +### Flags + + + + + + + + + + + + + + +```bash title="Basic usage" +knock workflow run my-workflow \ + --environment=production \ + --recipients=ellie +``` + + +
+ +
+ + +You can validate a new or updated workflow directory with the `workflow validate` command. Knock will validate the given workflow payload in the same way as it would with the `workflow push` command, except without persisting those changes. + +Note: Validating a workflow is only done against the `development` environment. + +### Flags + + + + + + + + + +```bash title="Basic usage" +knock workflow validate my-workflow +``` + + +
+ +
+ + +You can activate or deactivate a workflow in a given environment with the `workflow activate` command. + +Note: + +- This immediately enables or disables a workflow in a given environment without needing to go through environment promotion. +- By default, this command activates a given workflow. Pass in the `--status` flag with `false` in order to deactivate it. + +### Flags + + + + + + + + + + +```bash title="Basic usage" +knock workflow activate my-workflow \ + --environment=development +``` + +```bash title="Deactivating a workflow" +knock workflow activate my-workflow \ + --environment=development \ + --status=false +``` + + +
+ +
+ + +Generate type definitions for workflow trigger data from your workflow schemas. This command fetches workflows with trigger data schemas and generates type-safe definitions for TypeScript, Python, Ruby, and Go. + +The generated types enable compile-time safety when triggering workflows in your application code, helping catch integration errors before runtime. The target language is inferred from the output file extension. + +Learn more about [type safety with workflows](/developer-tools/type-safety). + +### Flags + + + + + + + + + +```bash title="Generate TypeScript types" +knock workflow generate-types \ + --output-file=./types/knock-workflows.ts +``` + +```bash title="Generate Python types" +knock workflow generate-types \ + --output-file=./types/knock_workflows.py +``` + +```bash title="Generate Ruby types" +knock workflow generate-types \ + --output-file=./types/knock_workflows.rb +``` + +```bash title="Generate Go types" +knock workflow generate-types \ + --output-file=./types/knock_workflows.go +``` + +```bash title="Generate from production environment" +knock workflow generate-types \ + --environment=production \ + --output-file=./types/knock-workflows.ts +``` + + +
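If you want to keep the generated definitions in sync as part of a build or CI step, one approach (assuming a TypeScript project; the output path and the type-check step are illustrative) is:

```bash title="Regenerating types in a build step (sketch)"
knock workflow generate-types --output-file=./src/types/knock-workflows.ts
npx tsc --noEmit  # type-check the app against the refreshed definitions
```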
diff --git a/data/sidebars/cliSidebar.ts b/data/sidebars/cliSidebar.ts index cfbc4240f..d71535eb4 100644 --- a/data/sidebars/cliSidebar.ts +++ b/data/sidebars/cliSidebar.ts @@ -5,7 +5,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ title: "Getting started", slug: "/cli/overview", pages: [ - { slug: "", title: "Introduction" }, + { slug: "/", title: "Overview" }, { slug: "/installation", title: "Install the Knock CLI" }, { slug: "/authentication", title: "Authentication" }, { slug: "/global-flags", title: "Global flags" }, @@ -17,7 +17,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ { title: "Authentication", - slug: "/cli", + slug: "/cli/authentication", pages: [ { slug: "/login", title: "Login" }, { slug: "/logout", title: "Logout" }, @@ -26,7 +26,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ { title: "Managing resources", - slug: "/cli", + slug: "/cli/resources", pages: [ { slug: "/init", title: "Initialize a new project" }, { slug: "/pull", title: "Pull all resources" }, @@ -37,13 +37,19 @@ export const CLI_SIDEBAR: SidebarContent[] = [ { title: "Environments", slug: "/cli/environment", - pages: [{ slug: "/list", title: "List environments" }], + pages: [ + { slug: "/", title: "Overview" }, + { slug: "/list", title: "List environments" }, + ], }, { title: "Channels", slug: "/cli/channel", - pages: [{ slug: "/list", title: "List channels" }], + pages: [ + { slug: "/", title: "Overview" }, + { slug: "/list", title: "List channels" }, + ], }, { @@ -51,7 +57,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ slug: "/cli/branch", isBeta: true, pages: [ - { slug: "/overview", title: "Overview" }, + { slug: "/", title: "Overview" }, { slug: "/list", title: "List branches" }, { slug: "/create", title: "Create branches" }, { slug: "/delete", title: "Delete branches" }, @@ -65,6 +71,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ title: "Workflows", slug: "/cli/workflow", pages: [ + { slug: "/", title: "Overview" }, { slug: "/file-structure", title: "File structure" }, { slug: "/list", title: "List workflows" }, { slug: "/get", title: "Get workflows" }, @@ -82,6 +89,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ title: "Email layouts", slug: "/cli/email-layout", pages: [ + { slug: "/", title: "Overview" }, { slug: "/file-structure", title: "File structure" }, { slug: "/list", title: "List email layouts" }, { slug: "/get", title: "Get email layouts" }, @@ -96,6 +104,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ title: "Translations", slug: "/cli/translation", pages: [ + { slug: "/", title: "Overview" }, { slug: "/file-structure", title: "File structure" }, { slug: "/list", title: "List translations" }, { slug: "/get", title: "Get translations" }, @@ -109,6 +118,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ title: "Partials", slug: "/cli/partial", pages: [ + { slug: "/", title: "Overview" }, { slug: "/file-structure", title: "File structure" }, { slug: "/list", title: "List partials" }, { slug: "/new", title: "Create a new partial" }, @@ -123,6 +133,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ title: "Commits", slug: "/cli/commit", pages: [ + { slug: "/", title: "Overview" }, { slug: "/list", title: "List commits" }, { slug: "/get", title: "Get commits" }, { slug: "/all", title: "Commit changes" }, @@ -134,6 +145,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ title: "Guides", slug: "/cli/guide", pages: [ + { slug: "/", title: "Overview" }, { slug: "/file-structure", title: "File structure" }, { slug: "/list", title: "List guides" }, { slug: "/new", 
title: "Create a new guide" }, @@ -148,6 +160,7 @@ export const CLI_SIDEBAR: SidebarContent[] = [ title: "Message types", slug: "/cli/message-type", pages: [ + { slug: "/", title: "Overview" }, { slug: "/file-structure", title: "File structure" }, { slug: "/list", title: "List message types" }, { slug: "/new", title: "Create a new message type" }, diff --git a/data/specs/api/customizations.yml b/data/specs/api/customizations.yml index d9e190268..f2e4c7e4c 100644 --- a/data/specs/api/customizations.yml +++ b/data/specs/api/customizations.yml @@ -121,3 +121,16 @@ resources: name: Shared description: |- Resources that are shared across the API. + integrations: + name: Integrations + description: |- + Integrations are used to connect your system to external services. + subresources: + census: + name: Census + description: |- + Census is a service that allows you to sync user segments from your data warehouse to Knock. + hightouch: + name: Hightouch + description: |- + Hightouch is a service that allows you to sync user segments from your data warehouse to Knock. diff --git a/data/specs/mapi/customizations.yml b/data/specs/mapi/customizations.yml index 31e38334d..93d73cf0c 100644 --- a/data/specs/mapi/customizations.yml +++ b/data/specs/mapi/customizations.yml @@ -36,6 +36,10 @@ resources: name: Channels description: |- Channels are the delivery mechanisms for your notifications. + channel_groups: + name: Channel groups + description: |- + Channel groups are a way to group channels together to use as a single destination, or to conditionally apply rules to determine which channel(s) should be used. partials: name: Partials description: |- @@ -76,3 +80,7 @@ resources: name: Shared description: |- Resources that are shared across the API. + auth: + name: Authentication + description: |- + Authentication methods for the Knock management API. diff --git a/layouts/CliReferenceLayout.tsx b/layouts/CliReferenceLayout.tsx index c2c5a9cd5..ed95133e0 100644 --- a/layouts/CliReferenceLayout.tsx +++ b/layouts/CliReferenceLayout.tsx @@ -8,19 +8,41 @@ import { CLI_SIDEBAR } from "../data/sidebars/cliSidebar"; import { ContentActions } from "../components/ui/ContentActions"; import { useScrollToTop } from "../hooks/useScrollToTop"; -export const CliReferenceLayout = ({ frontMatter, children }) => { +interface CliReferenceLayoutProps { + frontMatter: { + title?: string; + metaTitle?: string; + description?: string; + metaDescription?: string; + }; + sourcePath?: string; + children: React.ReactNode; +} + +export const CliReferenceLayout = ({ + frontMatter, + children, +}: CliReferenceLayoutProps) => { const router = useRouter(); useInitialScrollState(); let paths = slugToPaths(router.query.slug); useScrollToTop(paths); + // Build canonical path from the current route + const canonicalPath = router.asPath.split("#")[0].split("?")[0]; + + // Get the resource name from the route (e.g., "overview", "resources", "workflow") + // This is the first segment after /cli/ and stays constant regardless of scroll position + const resource = router.query.resource as string; + const mdPath = resource ? 
`/cli/${resource}.md` : undefined; + return ( { title={frontMatter.title} description={frontMatter.description} bottomContent={ - + } /> {children} diff --git a/lib/openApiSpec.ts b/lib/openApiSpec.ts index 8f2e39d95..9b65e08a8 100644 --- a/lib/openApiSpec.ts +++ b/lib/openApiSpec.ts @@ -1,9 +1,15 @@ import { dereference } from "@scalar/openapi-parser"; +import { OpenAPIV3 } from "@scalar/openapi-types"; import deepmerge from "deepmerge"; import { readFile } from "fs/promises"; +import JSONPointer from "jsonpointer"; import safeStringify from "safe-stringify"; import { parse } from "yaml"; +// ============================================================================ +// Stainless Spec Types +// ============================================================================ + type StainlessResourceMethod = | string | { @@ -17,15 +23,7 @@ type StainlessResource = { description?: string; models?: Record; methods?: Record; - subresources?: Record< - string, - { - name?: string; - description?: string; - models?: Record; - methods?: Record; - } - >; + subresources?: Record; }; interface StainlessConfig { @@ -35,24 +33,209 @@ interface StainlessConfig { environments: Record; } +// ============================================================================ +// Page Data Types (for multi-page API reference) +// ============================================================================ + +/** + * Data for a single method page (e.g., /api-reference/users/get) + */ +type MethodPageData = { + resourceName: string; + resourceTitle: string; + methodName: string; + methodType: string; + endpoint: string; + operation: OpenAPIV3.OperationObject; + baseUrl: string; + // Subresource path if this method is in a subresource (e.g., ["feeds"]) + // Use null instead of undefined for JSON serialization compatibility + subresourcePath: string[] | null; +}; + +/** + * Data for a single schema page (e.g., /api-reference/users/schemas/user) + */ +type SchemaPageData = { + resourceName: string; + resourceTitle: string; + schemaName: string; + schemaRef: string; + schema: OpenAPIV3.SchemaObject; + // Subresource path if this schema is in a subresource + // Use null instead of undefined for JSON serialization compatibility + subresourcePath: string[] | null; +}; + +/** + * Summary info for a method in resource overview + */ +type MethodSummary = { + methodName: string; + methodType: string; + endpoint: string; + summary: string; +}; + +/** + * Summary info for a schema in resource overview + */ +type SchemaSummary = { + schemaName: string; + title: string; +}; + +/** + * Summary info for a subresource in resource overview + */ +type SubresourceSummary = { + name: string; + title: string; + methodCount: number; +}; + +/** + * Data for a resource overview page (e.g., /api-reference/users) + */ +type ResourceOverviewData = { + resourceName: string; + resource: { + name: string | null; + description: string | null; + }; + methods: MethodSummary[]; + schemas: SchemaSummary[]; + subresources: SubresourceSummary[]; +}; + +/** + * Sidebar page entry + */ +type SidebarPage = { + slug: string; + title: string; + pages?: SidebarPage[]; +}; + +/** + * Sidebar section for a resource + */ +type SidebarSection = { + title: string; + slug: string; + pages: SidebarPage[]; +}; + +/** + * Complete sidebar data for API reference navigation + */ +type SidebarData = { + resources: SidebarSection[]; +}; + +// ============================================================================ +// Spec Name Type +// 
============================================================================ + +type SpecName = "api" | "mapi"; + +// ============================================================================ +// Helper Functions +// ============================================================================ + function yamlToJson(yaml: string) { const json = parse(yaml); return json; } -async function readOpenApiSpec(specName: string) { - const spec = await readFile(`./data/specs/${specName}/openapi.yml`, "utf8"); - const jsonSpec = yamlToJson(spec); - const { schema } = await dereference(jsonSpec); +/** + * Resolve endpoint configuration to [methodType, endpoint] tuple. + * Handles both string format ("get /v1/users") and object format ({ endpoint: "get /v1/users" }) + */ +function resolveEndpoint( + methodConfig: StainlessResourceMethod, +): [string, string] { + const endpointString = + typeof methodConfig === "string" ? methodConfig : methodConfig.endpoint; - return JSON.parse(safeStringify(schema)); + const [methodType, endpoint] = endpointString.split(" "); + return [methodType.toLowerCase(), endpoint]; +} + +// ============================================================================ +// Spec Loading Functions (with caching) +// ============================================================================ + +// Module-level caches to avoid re-reading and re-parsing specs for each page +const openApiSpecCache: Record = {}; +const stainlessSpecCache: Record = {}; +const schemaReferencesCache: Record> = {}; +const sidebarDataCache: Record = {}; + +// Promises to handle concurrent requests for the same spec +const openApiSpecPromises: Record< + string, + Promise | undefined +> = {}; +const stainlessSpecPromises: Record< + string, + Promise | undefined +> = {}; + +async function readOpenApiSpec(specName: string): Promise { + // Return cached result if available + if (openApiSpecCache[specName]) { + return openApiSpecCache[specName]; + } + + // If already loading, wait for that promise + const existingPromise = openApiSpecPromises[specName]; + if (existingPromise) { + return existingPromise; + } + + // Start loading and cache the promise + const loadPromise = (async (): Promise => { + const spec = await readFile(`./data/specs/${specName}/openapi.yml`, "utf8"); + const jsonSpec = yamlToJson(spec); + const { schema } = await dereference(jsonSpec); + + const result = JSON.parse(safeStringify(schema)) as OpenAPIV3.Document; + openApiSpecCache[specName] = result; + return result; + })(); + + openApiSpecPromises[specName] = loadPromise; + return loadPromise; } async function readStainlessSpec(specName: string): Promise { - const customizations = await readSpecCustomizations(specName); - const spec = await readFile(`./data/specs/${specName}/stainless.yml`, "utf8"); - const stainlessSpec = parse(spec); - return deepmerge(stainlessSpec, customizations); + // Return cached result if available + if (stainlessSpecCache[specName]) { + return stainlessSpecCache[specName]; + } + + // If already loading, wait for that promise + const existingPromise = stainlessSpecPromises[specName]; + if (existingPromise) { + return existingPromise; + } + + // Start loading and cache the promise + const loadPromise = (async (): Promise => { + const customizations = await readSpecCustomizations(specName); + const spec = await readFile( + `./data/specs/${specName}/stainless.yml`, + "utf8", + ); + const stainlessSpec = parse(spec); + const result = deepmerge(stainlessSpec, customizations) as StainlessConfig; + 
stainlessSpecCache[specName] = result; + return result; + })(); + + stainlessSpecPromises[specName] = loadPromise; + return loadPromise; } async function readSpecCustomizations(specName: string) { @@ -65,5 +248,643 @@ async function readSpecCustomizations(specName: string) { return customizations; } -export type { StainlessResource, StainlessConfig }; -export { readOpenApiSpec, readStainlessSpec }; +// ============================================================================ +// Resource Order +// ============================================================================ + +/** + * Get the ordered list of resource names for a spec. + * This determines the order resources appear in the sidebar. + */ +async function getResourceOrder(specName: SpecName): Promise { + const stainlessSpec = await readStainlessSpec(specName); + // Return all resource keys from the spec + // For consistent ordering, we can sort alphabetically or maintain spec order + return Object.keys(stainlessSpec.resources); +} + +// ============================================================================ +// Method Page Data Loader +// ============================================================================ + +/** + * Navigate to a subresource using a path array. + * Returns the subresource at the given path, or undefined if not found. + */ +function getSubresource( + resource: StainlessResource, + subresourcePath: string[], +): StainlessResource | undefined { + let current: StainlessResource | undefined = resource; + + for (const pathSegment of subresourcePath) { + if (!current?.subresources?.[pathSegment]) { + return undefined; + } + current = current.subresources[pathSegment]; + } + + return current; +} + +/** + * Load data for a single method page. + * Supports methods in both top-level resources and subresources. + */ +async function getMethodPageData( + specName: SpecName, + resourceName: string, + methodName: string, + subresourcePath: string[] = [], +): Promise { + const [openApiSpec, stainlessSpec] = await Promise.all([ + readOpenApiSpec(specName), + readStainlessSpec(specName), + ]); + + const resource = stainlessSpec.resources[resourceName]; + if (!resource) { + return null; + } + + // Navigate to the target resource (may be a subresource) + const targetResource = + subresourcePath.length > 0 + ? getSubresource(resource, subresourcePath) + : resource; + + if (!targetResource?.methods?.[methodName]) { + return null; + } + + const methodConfig = targetResource.methods[methodName]; + const [methodType, endpoint] = resolveEndpoint(methodConfig); + const operation = openApiSpec.paths?.[endpoint]?.[ + methodType as keyof OpenAPIV3.PathItemObject + ] as OpenAPIV3.OperationObject | undefined; + + if (!operation) { + return null; + } + + // Determine the resource title (use parent resource name for subresources) + const resourceTitle = resource.name || resourceName; + + return { + resourceName, + resourceTitle, + methodName, + methodType, + endpoint, + operation, + baseUrl: stainlessSpec.environments.production, + subresourcePath: subresourcePath.length > 0 ? subresourcePath : null, + }; +} + +// ============================================================================ +// Schema Page Data Loader +// ============================================================================ + +/** + * Load data for a single schema page. + * Supports schemas in both top-level resources and subresources. 
+ */ +async function getSchemaPageData( + specName: SpecName, + resourceName: string, + schemaName: string, + subresourcePath: string[] = [], +): Promise { + const [openApiSpec, stainlessSpec] = await Promise.all([ + readOpenApiSpec(specName), + readStainlessSpec(specName), + ]); + + const resource = stainlessSpec.resources[resourceName]; + if (!resource) { + return null; + } + + // Navigate to the target resource (may be a subresource) + const targetResource = + subresourcePath.length > 0 + ? getSubresource(resource, subresourcePath) + : resource; + + if (!targetResource?.models?.[schemaName]) { + return null; + } + + const schemaRef = targetResource.models[schemaName]; + const schema = JSONPointer.get(openApiSpec, schemaRef.replace("#", "")) as + | OpenAPIV3.SchemaObject + | undefined; + + if (!schema) { + return null; + } + + const resourceTitle = resource.name || resourceName; + + return { + resourceName, + resourceTitle, + schemaName, + schemaRef, + schema, + subresourcePath: subresourcePath.length > 0 ? subresourcePath : null, + }; +} + +// ============================================================================ +// Resource Overview Data Loader +// ============================================================================ + +/** + * Load data for a resource overview page. + * Includes list of methods, schemas, and subresources with summary info. + */ +async function getResourceOverviewData( + specName: SpecName, + resourceName: string, + subresourcePath: string[] = [], +): Promise { + const [openApiSpec, stainlessSpec] = await Promise.all([ + readOpenApiSpec(specName), + readStainlessSpec(specName), + ]); + + const resource = stainlessSpec.resources[resourceName]; + if (!resource) { + return null; + } + + // Navigate to the target resource (may be a subresource) + const targetResource = + subresourcePath.length > 0 + ? 
getSubresource(resource, subresourcePath) + : resource; + + if (!targetResource) { + return null; + } + + // Build list of methods with summary info + const methods: MethodSummary[] = Object.entries( + targetResource.methods || {}, + ).map(([methodName, config]) => { + const [methodType, endpoint] = resolveEndpoint(config); + const operation = openApiSpec.paths?.[endpoint]?.[ + methodType as keyof OpenAPIV3.PathItemObject + ] as OpenAPIV3.OperationObject | undefined; + return { + methodName, + methodType, + endpoint, + summary: operation?.summary || methodName, + }; + }); + + // Build list of schemas with name/title + const schemas: SchemaSummary[] = Object.entries( + targetResource.models || {}, + ).map(([schemaName, ref]) => { + const schema = JSONPointer.get(openApiSpec, ref.replace("#", "")) as + | OpenAPIV3.SchemaObject + | undefined; + return { + schemaName, + title: schema?.title || schemaName, + }; + }); + + // Build subresource info + const subresources: SubresourceSummary[] = Object.entries( + targetResource.subresources || {}, + ).map(([subName, subResource]) => ({ + name: subName, + title: subResource.name || subName, + methodCount: Object.keys(subResource.methods || {}).length, + })); + + return { + resourceName, + resource: { + name: targetResource.name || null, + description: targetResource.description || null, + }, + methods, + schemas, + subresources, + }; +} + +// ============================================================================ +// Path Generation for Static Paths +// ============================================================================ + +type ApiReferencePath = { + params: { + resource: string; + slug?: string[]; + }; +}; + +/** + * Generate all static paths for API reference pages. + * Used by getStaticPaths to generate all method, schema, and subresource pages. 
+ */ +async function getAllApiReferencePaths( + specName: SpecName, +): Promise { + const stainlessSpec = await readStainlessSpec(specName); + const paths: ApiReferencePath[] = []; + + function processResource( + resource: StainlessResource, + resourceName: string, + parentSlug: string[] = [], + ) { + // Resource overview (no slug for top-level, has slug for subresources) + if (parentSlug.length === 0) { + paths.push({ params: { resource: resourceName } }); + } else { + // Subresource overview + paths.push({ + params: { + resource: resourceName, + slug: parentSlug, + }, + }); + } + + // Method pages + if (resource.methods) { + Object.keys(resource.methods).forEach((methodName) => { + paths.push({ + params: { + resource: resourceName, + slug: [...parentSlug, methodName], + }, + }); + }); + } + + // Schema pages + if (resource.models) { + Object.keys(resource.models).forEach((schemaName) => { + paths.push({ + params: { + resource: resourceName, + slug: [...parentSlug, "schemas", schemaName], + }, + }); + }); + } + + // Subresources (recursive) + if (resource.subresources) { + Object.entries(resource.subresources).forEach( + ([subName, subResource]) => { + // Process subresource methods, schemas, and nested subresources + processResource(subResource, resourceName, [...parentSlug, subName]); + }, + ); + } + } + + Object.entries(stainlessSpec.resources).forEach( + ([resourceName, resource]) => { + processResource(resource, resourceName); + }, + ); + + return paths; +} + +// ============================================================================ +// Sidebar Data Loader +// ============================================================================ + +/** + * Build sidebar pages for a resource (recursively handles subresources) + */ +function buildResourceSidebarPages( + resource: StainlessResource, + openApiSpec: OpenAPIV3.Document, + pathPrefix: string, +): SidebarPage[] { + const pages: SidebarPage[] = []; + + // Methods + if (resource.methods) { + Object.entries(resource.methods).forEach(([methodName, methodConfig]) => { + const [methodType, endpoint] = resolveEndpoint(methodConfig); + const operation = openApiSpec.paths?.[endpoint]?.[ + methodType as keyof OpenAPIV3.PathItemObject + ] as OpenAPIV3.OperationObject | undefined; + + pages.push({ + slug: `${pathPrefix}/${methodName}`, + title: operation?.summary || methodName, + }); + }); + } + + // Subresources + if (resource.subresources) { + Object.entries(resource.subresources).forEach(([subName, subResource]) => { + const subPages = buildResourceSidebarPages( + subResource, + openApiSpec, + `${pathPrefix}/${subName}`, + ); + + pages.push({ + slug: `${pathPrefix}/${subName}`, + title: subResource.name || subName, + pages: subPages, + }); + }); + } + + // Schemas + if (resource.models && Object.keys(resource.models).length > 0) { + const schemaPages: SidebarPage[] = Object.entries(resource.models).map( + ([schemaName, schemaRef]) => { + const schema = JSONPointer.get( + openApiSpec, + schemaRef.replace("#", ""), + ) as OpenAPIV3.SchemaObject | undefined; + + return { + slug: `${pathPrefix}/schemas/${schemaName}`, + title: schema?.title || schemaName, + }; + }, + ); + + pages.push({ + slug: `${pathPrefix}/schemas`, + title: "Object definitions", + pages: schemaPages, + }); + } + + return pages; +} + +/** + * Load sidebar structure for navigation. + * Includes links to all resources, methods, and schemas. 
+ */ +async function getSidebarData(specName: SpecName): Promise { + // Return cached result if available + if (sidebarDataCache[specName]) { + return sidebarDataCache[specName]; + } + + const [openApiSpec, stainlessSpec] = await Promise.all([ + readOpenApiSpec(specName), + readStainlessSpec(specName), + ]); + + const basePath = specName === "api" ? "/api-reference" : "/mapi-reference"; + + const resources: SidebarSection[] = Object.entries( + stainlessSpec.resources, + ).map(([resourceName, resource]) => { + const pathPrefix = `${basePath}/${resourceName}`; + + return { + title: resource.name || resourceName, + slug: pathPrefix, + pages: buildResourceSidebarPages(resource, openApiSpec, pathPrefix), + }; + }); + + const result = { resources }; + sidebarDataCache[specName] = result; + return result; +} + +// ============================================================================ +// Schema References Builder +// ============================================================================ + +/** + * Build a map of schema names to their URL paths. + * Used for cross-linking schemas in method documentation. + */ +function buildSchemaReferencesForResource( + resource: StainlessResource, + openApiSpec: OpenAPIV3.Document, + basePath: string, +): Record { + const schemaReferences: Record = {}; + + if (resource.models) { + Object.entries(resource.models).forEach(([modelName, modelRef]) => { + const schema = JSONPointer.get(openApiSpec, modelRef.replace("#", "")) as + | OpenAPIV3.SchemaObject + | undefined; + + const title = schema?.title ?? modelName; + + if (schema) { + schemaReferences[title] = `${basePath}/schemas/${modelName}`; + // Also map array types + schemaReferences[`${title}[]`] = `${basePath}/schemas/${modelName}`; + } + }); + } + + if (resource.subresources) { + Object.entries(resource.subresources).forEach( + ([subresourceName, subresource]) => { + Object.assign( + schemaReferences, + buildSchemaReferencesForResource( + subresource, + openApiSpec, + `${basePath}/${subresourceName}`, + ), + ); + }, + ); + } + + return schemaReferences; +} + +/** + * Build complete schema references map for all resources. + */ +async function buildSchemaReferences( + specName: SpecName, +): Promise> { + // Return cached result if available + if (schemaReferencesCache[specName]) { + return schemaReferencesCache[specName]; + } + + const [openApiSpec, stainlessSpec] = await Promise.all([ + readOpenApiSpec(specName), + readStainlessSpec(specName), + ]); + + const basePath = specName === "api" ? "/api-reference" : "/mapi-reference"; + const schemaReferences: Record = {}; + + Object.entries(stainlessSpec.resources).forEach( + ([resourceName, resource]) => { + Object.assign( + schemaReferences, + buildSchemaReferencesForResource( + resource, + openApiSpec, + `${basePath}/${resourceName}`, + ), + ); + }, + ); + + schemaReferencesCache[specName] = schemaReferences; + return schemaReferences; +} + +// ============================================================================ +// Full Resource Page Data (for per-resource pages) +// ============================================================================ + +/** + * Data for a full resource page that renders all methods, schemas, and subresources + */ +type FullResourcePageData = { + resourceName: string; + resource: StainlessResource; + openApiSpec: OpenAPIV3.Document; + stainlessConfig: StainlessConfig; + baseUrl: string; + schemaReferences: Record; +}; + +/** + * Load all data needed to render a full resource page. 
+ * This includes the resource definition and the OpenAPI spec for resolving operations/schemas. + */ +async function getFullResourcePageData( + specName: SpecName, + resourceName: string, +): Promise { + const [openApiSpec, stainlessSpec, schemaReferences] = await Promise.all([ + readOpenApiSpec(specName), + readStainlessSpec(specName), + buildSchemaReferences(specName), + ]); + + const resource = stainlessSpec.resources[resourceName]; + + if (!resource) { + return null; + } + + const baseUrl = stainlessSpec.environments["production"] || ""; + + return { + resourceName, + resource, + openApiSpec, + stainlessConfig: stainlessSpec, + baseUrl, + schemaReferences, + }; +} + +// ============================================================================ +// Split Resource Data (optimized per-resource data from pre-built files) +// ============================================================================ + +/** + * Pre-split resource data loaded from generated JSON files. + * Contains only the data needed to render a single resource page, + * significantly smaller than FullResourcePageData. + */ +type SplitResourceData = { + resourceName: string; + resource: StainlessResource; + paths: Record; + schemas: Record; + schemaReferences: Record; + baseUrl: string; +}; + +/** + * Load pre-split resource data from generated JSON file. + * These files are created by scripts/splitOpenApiSpec.ts during build. + * + * Falls back to getFullResourcePageData if the split file doesn't exist + * (e.g., during development before running split-specs). + */ +async function getSplitResourceData( + specName: SpecName, + resourceName: string, +): Promise { + const filePath = `./data/specs/${specName}/resources/${resourceName}.json`; + + try { + const data = await readFile(filePath, "utf8"); + return JSON.parse(data) as SplitResourceData; + } catch { + // File doesn't exist, likely running in dev without split-specs + console.warn( + `Split resource file not found: ${filePath}. 
Run 'yarn split-specs' to generate.`, + ); + return null; + } +} + +// ============================================================================ +// Exports +// ============================================================================ + +export type { + StainlessResource, + StainlessResourceMethod, + StainlessConfig, + MethodPageData, + SchemaPageData, + ResourceOverviewData, + MethodSummary, + SchemaSummary, + SubresourceSummary, + SidebarPage, + SidebarSection, + SidebarData, + ApiReferencePath, + SpecName, + FullResourcePageData, + SplitResourceData, +}; + +export { + // Existing exports + readOpenApiSpec, + readStainlessSpec, + // New helper + resolveEndpoint, + // New page data loaders + getMethodPageData, + getSchemaPageData, + getResourceOverviewData, + getFullResourcePageData, + getSplitResourceData, + // Path generation + getAllApiReferencePaths, + getResourceOrder, + // Sidebar data + getSidebarData, + // Schema references + buildSchemaReferences, +}; diff --git a/next.config.js b/next.config.js index 97debb24a..1466632b2 100644 --- a/next.config.js +++ b/next.config.js @@ -623,11 +623,12 @@ const nextConfig = { destination: "/in-app-ui/message-types/overview", permanent: true, }, - { - source: "/cli", - destination: "/cli/overview", - permanent: false, - }, + // { + // source: "/cli", + // destination: "/cli/overview", + // permanent: false, + // }, + // Redirect /api-reference to /api-reference/overview { source: "/api-reference", destination: "/api-reference/overview", @@ -694,25 +695,43 @@ const nextConfig = { destination: "/version-control/environments", permanent: true, }, - ]; - }, - - async rewrites() { - return [ - // API reference pages all serve the same static content - // The URL paths are used for client-side navigation to sections + // CLI old paths redirects { - source: "/api-reference/:path+", - destination: "/api-reference", + source: "/cli/login", + destination: "/cli/authentication/login", + permanent: true, }, { - source: "/mapi-reference/:path+", - destination: "/mapi-reference", + source: "/cli/logout", + destination: "/cli/authentication/logout", + permanent: true, + }, + { + source: "/cli/init", + destination: "/cli/resources/init", + permanent: true, }, - // CLI reference pages all serve the same static content { - source: "/cli/:path+", - destination: "/cli", + source: "/cli/pull", + destination: "/cli/resources/pull", + permanent: true, + }, + { + source: "/cli/push", + destination: "/cli/resources/push", + permanent: true, + }, + // CLI overview section redirects (old paths without /overview prefix) + { + source: "/cli/overview/introduction", + destination: "/cli/overview", + permanent: true, + }, + // CLI branch overview redirect + { + source: "/cli/branch/overview", + destination: "/cli/branch", + permanent: true, }, ]; }, diff --git a/package.json b/package.json index 7446fb8d6..2a84ea312 100644 --- a/package.json +++ b/package.json @@ -17,8 +17,9 @@ "generate-reference-md": "tsx scripts/generateApiMarkdown.ts", "index-apis": "tsx scripts/indexApisForSearch.ts", "open-api-to-md": "bash scripts/openApiToMd.sh", - "predev": "yarn generate-llms", - "prebuild": "yarn generate-llms && yarn index-apis" + "split-specs": "tsx scripts/splitOpenApiSpec.ts", + "predev": "yarn split-specs && yarn generate-llms", + "prebuild": "yarn split-specs && yarn generate-llms && yarn index-apis" }, "dependencies": { "@algolia/autocomplete-js": "^1.6.3", diff --git a/pages/[...slug].tsx b/pages/[...slug].tsx index bfc960fdd..ea84eefe1 100644 --- 
a/pages/[...slug].tsx +++ b/pages/[...slug].tsx @@ -106,18 +106,21 @@ export const getStaticPaths = async () => { const filePaths = getAllFilesInDir(CONTENT_DIR, [], DOCS_FILE_EXTENSIONS); // Format the slug to generate the correct path - const paths = filePaths.map((path) => { - const slug = path - .replace(CONTENT_DIR, "") - .replace(/\.mdx?$/, "") - .split(sep); + const paths = filePaths + .map((path) => { + const slug = path + .replace(CONTENT_DIR, "") + .replace(/\.mdx?$/, "") + .split(sep); - return { - params: { - slug, - }, - }; - }); + return { + params: { + slug, + }, + }; + }) + // Exclude CLI paths - these are handled by /pages/cli/[resource]/[[...slug]].tsx + .filter(({ params: { slug } }) => slug[0] !== "cli"); return { paths, diff --git a/pages/_app.tsx b/pages/_app.tsx index accf1464d..82785a784 100644 --- a/pages/_app.tsx +++ b/pages/_app.tsx @@ -19,10 +19,11 @@ import "@algolia/autocomplete-theme-classic"; import "../styles/index.css"; import "../styles/global.css"; import "../styles/responsive.css"; +import App, { AppContext, AppInitialProps, AppProps } from "next/app"; const inter = Inter({ subsets: ["latin"], display: "swap" }); -function App({ Component, pageProps }) { +function MyApp({ Component, pageProps }) { const router = useRouter(); const eventEmitter = useEventEmitterInstance(); @@ -49,8 +50,8 @@ function App({ Component, pageProps }) { }, [router.events]); return ( - - + +
@@ -63,4 +64,11 @@ function App({ Component, pageProps }) { ); } -export default App; +MyApp.getInitialProps = async ( + context: AppContext, +): Promise => { + const ctx = await App.getInitialProps(context); + return { ...ctx }; +}; + +export default MyApp; diff --git a/pages/api-reference/[resource]/[[...slug]].tsx b/pages/api-reference/[resource]/[[...slug]].tsx new file mode 100644 index 000000000..de8c19c4b --- /dev/null +++ b/pages/api-reference/[resource]/[[...slug]].tsx @@ -0,0 +1,97 @@ +import { GetStaticPaths, GetStaticProps } from "next"; +import { + getSplitResourceData, + getSidebarData, + SplitResourceData, + SidebarData, + getAllApiReferencePaths, +} from "@/lib/openApiSpec"; +import { ApiReferenceLayout } from "@/components/api-reference"; +import { ResourceFullPage } from "@/components/api-reference"; +import { API_REFERENCE_OVERVIEW_CONTENT } from "@/data/sidebars/apiOverviewSidebar"; + +interface ResourcePageProps { + sidebarData: SidebarData; + resourceData: SplitResourceData; +} + +export default function ResourcePage({ + sidebarData, + resourceData, +}: ResourcePageProps) { + // Guard against undefined resourceData during client-side transitions + if (!resourceData) { + return null; + } + + const basePath = `/api-reference/${resourceData.resourceName}`; + const title = resourceData.resource.name || resourceData.resourceName; + const description = `Complete reference documentation for the ${title} resource.`; + + return ( + + + + ); +} + +export const getStaticPaths: GetStaticPaths = async () => { + // Generate all paths including deep links (methods, schemas, subresources) + // This ensures client-side navigation works properly since each path + // is an actual page with its own JSON data file. + const allPaths = await getAllApiReferencePaths("api"); + + const paths = allPaths.map((p) => ({ + params: { + resource: p.params.resource, + // slug is optional - undefined for resource root, array for deep paths + slug: p.params.slug, + }, + })); + + return { + paths, + fallback: false, + }; +}; + +export const getStaticProps: GetStaticProps = async ({ + params, +}) => { + // Extract resource name - the first segment determines which data to load + const resourceName = Array.isArray(params?.resource) + ? 
params.resource[0] + : params?.resource; + + if (!resourceName) { + return { notFound: true }; + } + + // All deep paths (methods, schemas, subresources) render the same resource page + // The page handles scrolling to the correct section based on the URL + const [sidebarData, resourceData] = await Promise.all([ + getSidebarData("api"), + getSplitResourceData("api", resourceName), + ]); + + if (!resourceData) { + return { notFound: true }; + } + + return { + props: { + sidebarData, + resourceData, + }, + revalidate: 3600, // Revalidate every hour + }; +}; diff --git a/pages/api-reference/index.tsx b/pages/api-reference/index.tsx index cf91a92c2..2a0961597 100644 --- a/pages/api-reference/index.tsx +++ b/pages/api-reference/index.tsx @@ -1,40 +1,50 @@ import fs from "fs"; -import { MDXRemote } from "next-mdx-remote"; +import { GetStaticProps } from "next"; +import { MDXRemote, MDXRemoteSerializeResult } from "next-mdx-remote"; import rehypeMdxCodeProps from "rehype-mdx-code-props"; import { serialize } from "next-mdx-remote/serialize"; import remarkGfm from "remark-gfm"; +import { Box } from "@telegraph/layout"; -import { readOpenApiSpec, readStainlessSpec } from "@/lib/openApiSpec"; +import { getSidebarData, SidebarData } from "@/lib/openApiSpec"; import { CONTENT_DIR } from "@/lib/content.server"; import { MDX_COMPONENTS } from "@/lib/mdxComponents"; -import ApiReference from "@/components/ui/ApiReference/ApiReference"; -import { - RESOURCE_ORDER, - API_REFERENCE_OVERVIEW_CONTENT, -} from "@/data/sidebars/apiOverviewSidebar"; +import { ApiReferenceLayout } from "@/components/api-reference"; +import { API_REFERENCE_OVERVIEW_CONTENT } from "@/data/sidebars/apiOverviewSidebar"; -function ApiReferencePage({ openApiSpec, stainlessSpec, preContentMdx }) { +interface ApiReferenceOverviewProps { + sidebarData: SidebarData; + overviewContentMdx: MDXRemoteSerializeResult; +} + +function ApiReferenceOverview({ + sidebarData, + overviewContentMdx, +}: ApiReferenceOverviewProps) { return ( - } - resourceOrder={RESOURCE_ORDER} + + title="API reference" + description="Complete reference documentation for the Knock API." 
+ > + + + + ); } -export async function getStaticProps() { - const openApiSpec = await readOpenApiSpec("api"); - const stainlessSpec = await readStainlessSpec("api"); +export const getStaticProps: GetStaticProps< + ApiReferenceOverviewProps +> = async () => { + const sidebarData = await getSidebarData("api"); - const preContent = fs.readFileSync( + const overviewContent = fs.readFileSync( `${CONTENT_DIR}/__api-reference/content.mdx`, ); - const preContentMdx = await serialize(preContent.toString(), { + const overviewContentMdx = await serialize(overviewContent.toString(), { parseFrontmatter: true, mdxOptions: { remarkPlugins: [remarkGfm], @@ -42,7 +52,13 @@ export async function getStaticProps() { }, }); - return { props: { openApiSpec, stainlessSpec, preContentMdx } }; -} + return { + props: { + sidebarData, + overviewContentMdx, + }, + revalidate: 3600, // Revalidate every hour + }; +}; -export default ApiReferencePage; +export default ApiReferenceOverview; diff --git a/pages/api-reference/overview/[[...section]].tsx b/pages/api-reference/overview/[[...section]].tsx new file mode 100644 index 000000000..7ac7a9e9f --- /dev/null +++ b/pages/api-reference/overview/[[...section]].tsx @@ -0,0 +1,97 @@ +import fs from "fs"; +import { GetStaticPaths, GetStaticProps } from "next"; +import { MDXRemote, MDXRemoteSerializeResult } from "next-mdx-remote"; +import rehypeMdxCodeProps from "rehype-mdx-code-props"; +import { serialize } from "next-mdx-remote/serialize"; +import remarkGfm from "remark-gfm"; +import { Box } from "@telegraph/layout"; + +import { getSidebarData, SidebarData } from "@/lib/openApiSpec"; +import { CONTENT_DIR } from "@/lib/content.server"; +import { MDX_COMPONENTS } from "@/lib/mdxComponents"; +import { ApiReferenceLayout } from "@/components/api-reference"; +import { API_REFERENCE_OVERVIEW_CONTENT } from "@/data/sidebars/apiOverviewSidebar"; + +interface ApiReferenceOverviewProps { + sidebarData: SidebarData; + overviewContentMdx: MDXRemoteSerializeResult; +} + +function ApiReferenceOverview({ + sidebarData, + overviewContentMdx, +}: ApiReferenceOverviewProps) { + return ( + + + + + + ); +} + +export const getStaticPaths: GetStaticPaths = async () => { + // Generate paths for all overview sections from the sidebar config + const overviewPages = API_REFERENCE_OVERVIEW_CONTENT[0]?.pages || []; + + const paths = [ + // Base overview path (no section) + { params: { section: [] } }, + // All section paths + ...overviewPages.map((page) => ({ + params: { + section: page.slug === "/" ? 
[] : [page.slug.replace(/^\//, "")], + }, + })), + ]; + + // Remove duplicates (the "/" slug creates a duplicate of the base path) + const uniquePaths = paths.filter( + (path, index, self) => + index === + self.findIndex( + (p) => + JSON.stringify(p.params.section) === + JSON.stringify(path.params.section), + ), + ); + + return { + paths: uniquePaths, + fallback: false, + }; +}; + +export const getStaticProps: GetStaticProps< + ApiReferenceOverviewProps +> = async () => { + const sidebarData = await getSidebarData("api"); + + const overviewContent = fs.readFileSync( + `${CONTENT_DIR}/__api-reference/content.mdx`, + ); + + const overviewContentMdx = await serialize(overviewContent.toString(), { + parseFrontmatter: true, + mdxOptions: { + remarkPlugins: [remarkGfm], + rehypePlugins: [rehypeMdxCodeProps], + }, + }); + + return { + props: { + sidebarData, + overviewContentMdx, + }, + revalidate: 3600, // Revalidate every hour + }; +}; + +export default ApiReferenceOverview; diff --git a/pages/cli/[...slug].tsx b/pages/cli/[...slug].tsx deleted file mode 100644 index bc8674f9d..000000000 --- a/pages/cli/[...slug].tsx +++ /dev/null @@ -1,91 +0,0 @@ -import fs from "fs"; -import { MDXRemote } from "next-mdx-remote"; -import { serialize } from "next-mdx-remote/serialize"; -import rehypeMdxCodeProps from "rehype-mdx-code-props"; -import remarkGfm from "remark-gfm"; - -import { SidebarContent } from "@/data/types"; -import { MDX_COMPONENTS } from "@/lib/mdxComponents"; -import datadogDashboardJson from "../../content/integrations/extensions/datadog_dashboard.json"; -import newRelicDashboardJson from "../../content/integrations/extensions/new_relic_dashboard.json"; -import eventPayload from "../../data/code/sources/eventPayload"; -import MDXLayout from "../../layouts/MDXLayout"; -import { CONTENT_DIR } from "../../lib/content.server"; - -import { CLI_SIDEBAR } from "@/data/sidebars/cliSidebar"; - -let cachedCliPageComponent; - -function CachedCliPageComponent({ source, sourcePath }) { - if (!cachedCliPageComponent) { - cachedCliPageComponent = ( - - - - ); - } - return cachedCliPageComponent; -} - -// Get the props for a single path -export async function getStaticProps() { - const sourcePath = `${CONTENT_DIR}__cli/content.mdx`; - - // Read the source content file, checking for .mdx and .md files - const preContent = fs.readFileSync(sourcePath); - - const mdx = await serialize(preContent.toString(), { - parseFrontmatter: true, - mdxOptions: { - remarkPlugins: [remarkGfm], - rehypePlugins: [rehypeMdxCodeProps], - }, - }); - - return { props: { source: mdx, sourcePath } }; -} - -export async function getStaticPaths() { - const paths: { params: { slug: string[] } }[] = []; - const pages: SidebarContent[] = CLI_SIDEBAR; - - for (const page of pages) { - const slug = page.slug.split("/").pop() as string; - paths.push({ params: { slug: [slug] } }); - - for (const subPage of page.pages ?? []) { - paths.push({ - params: { slug: [slug, subPage.slug.replace("/", "")] }, - }); - - if ("pages" in subPage) { - for (const subSubPage of subPage.pages ?? 
[]) { - paths.push({ - params: { - slug: [ - slug, - subPage.slug.replace("/", ""), - subSubPage.slug.replace("/", ""), - ], - }, - }); - } - } - } - } - - return { - paths, - fallback: false, - }; -} - -export default CachedCliPageComponent; diff --git a/pages/cli/[resource]/[[...slug]].tsx b/pages/cli/[resource]/[[...slug]].tsx new file mode 100644 index 000000000..473b8d4e9 --- /dev/null +++ b/pages/cli/[resource]/[[...slug]].tsx @@ -0,0 +1,127 @@ +import fs from "fs"; +import { GetStaticPaths, GetStaticProps } from "next"; +import { MDXRemote, MDXRemoteSerializeResult } from "next-mdx-remote"; +import { serialize } from "next-mdx-remote/serialize"; +import rehypeMdxCodeProps from "rehype-mdx-code-props"; +import remarkGfm from "remark-gfm"; + +import { MDX_COMPONENTS } from "@/lib/mdxComponents"; +import { CLI_SIDEBAR } from "@/data/sidebars/cliSidebar"; +import { CONTENT_DIR } from "@/lib/content.server"; +import { CliReferenceLayout } from "@/layouts/CliReferenceLayout"; +import AiChatButton from "@/components/AiChatButton"; + +interface CliPageProps { + source: MDXRemoteSerializeResult; + frontmatter: { + title: string; + description: string; + }; + sourcePath: string; +} + +export default function CliResourcePage({ + source, + frontmatter, + sourcePath, +}: CliPageProps) { + return ( + + + + + ); +} + +function getAllCliPaths(): { resource: string; slug?: string[] }[] { + const paths: { resource: string; slug?: string[] }[] = []; + + for (const section of CLI_SIDEBAR) { + // Extract resource name from the section slug (e.g., "/cli/workflow" -> "workflow") + const resourceMatch = section.slug.match(/^\/cli\/(.+)$/); + if (!resourceMatch) continue; + + const resource = resourceMatch[1]; + + // Add the resource root path + paths.push({ resource, slug: undefined }); + + // Add all subpages - they all render the same resource page + for (const page of section.pages || []) { + if (page.slug === "/" || page.slug === "") { + // Root page is already added above + continue; + } + + const pageSlug = page.slug.replace(/^\//, ""); + paths.push({ resource, slug: [pageSlug] }); + + // Handle nested pages if any + if ("pages" in page && page.pages) { + for (const subPage of page.pages) { + const subPageSlug = subPage.slug.replace(/^\//, ""); + paths.push({ resource, slug: [pageSlug, subPageSlug] }); + } + } + } + } + + return paths; +} + +export const getStaticPaths: GetStaticPaths = async () => { + const allPaths = getAllCliPaths(); + + const paths = allPaths.map((p) => ({ + params: { + resource: p.resource, + slug: p.slug, + }, + })); + + return { + paths, + fallback: false, + }; +}; + +export const getStaticProps: GetStaticProps = async ({ + params, +}) => { + const resource = params?.resource as string; + + // All subpaths render the same resource page (single page per resource) + // The page content contains Section components for scroll-to-section + const contentPath = `${CONTENT_DIR}/cli/${resource}.mdx`; + + // Check if file exists + if (!fs.existsSync(contentPath)) { + return { notFound: true }; + } + + const fileContent = fs.readFileSync(contentPath, "utf-8"); + + const mdx = await serialize(fileContent, { + parseFrontmatter: true, + mdxOptions: { + remarkPlugins: [remarkGfm], + rehypePlugins: [rehypeMdxCodeProps], + }, + }); + + const frontmatter = (mdx.frontmatter || {}) as { + title: string; + description: string; + }; + + return { + props: { + source: mdx, + frontmatter: { + title: frontmatter.title || resource, + description: frontmatter.description || "", + }, + sourcePath: 
contentPath, + }, + }; +}; diff --git a/pages/cli/index.tsx b/pages/cli/index.tsx index 096359e6a..7943aab00 100644 --- a/pages/cli/index.tsx +++ b/pages/cli/index.tsx @@ -1,40 +1,44 @@ import fs from "fs"; -import { MDXRemote } from "next-mdx-remote"; +import { GetStaticProps } from "next"; +import { MDXRemote, MDXRemoteSerializeResult } from "next-mdx-remote"; import { serialize } from "next-mdx-remote/serialize"; import rehypeMdxCodeProps from "rehype-mdx-code-props"; import remarkGfm from "remark-gfm"; import { MDX_COMPONENTS } from "@/lib/mdxComponents"; -import AiChatButton from "../../components/AiChatButton"; -import datadogDashboardJson from "../../content/integrations/extensions/datadog_dashboard.json"; -import newRelicDashboardJson from "../../content/integrations/extensions/new_relic_dashboard.json"; -import eventPayload from "../../data/code/sources/eventPayload"; -import MDXLayout from "../../layouts/MDXLayout"; -import { CONTENT_DIR } from "../../lib/content.server"; - -function CliPage({ source, sourcePath }) { +import { CONTENT_DIR } from "@/lib/content.server"; +import { CliReferenceLayout } from "@/layouts/CliReferenceLayout"; +import AiChatButton from "@/components/AiChatButton"; + +interface CliIndexPageProps { + source: MDXRemoteSerializeResult; + frontmatter: { + title: string; + description: string; + }; + sourcePath: string; +} + +export default function CliIndexPage({ + source, + frontmatter, + sourcePath, +}: CliIndexPageProps) { return ( - - + + - + ); } -export async function getStaticProps() { - const sourcePath = `${CONTENT_DIR}/__cli/content.mdx`; +export const getStaticProps: GetStaticProps = async () => { + // Render the overview content at the /cli index + const contentPath = `${CONTENT_DIR}/cli/overview.mdx`; - const preContent = fs.readFileSync(sourcePath); + const fileContent = fs.readFileSync(contentPath, "utf-8"); - const mdx = await serialize(preContent.toString(), { + const mdx = await serialize(fileContent, { parseFrontmatter: true, mdxOptions: { remarkPlugins: [remarkGfm], @@ -42,7 +46,21 @@ export async function getStaticProps() { }, }); - return { props: { source: mdx, sourcePath } }; -} + const frontmatter = (mdx.frontmatter || {}) as { + title: string; + description: string; + }; -export default CliPage; + return { + props: { + source: mdx, + frontmatter: { + title: frontmatter.title || "CLI reference", + description: + frontmatter.description || + "Learn more about the commands and flags available in the Knock CLI.", + }, + sourcePath: contentPath, + }, + }; +}; diff --git a/pages/index.tsx b/pages/index.tsx index f48540c17..f582d1132 100644 --- a/pages/index.tsx +++ b/pages/index.tsx @@ -71,7 +71,7 @@ export default function Home() { diff --git a/pages/mapi-reference/[resource]/[[...slug]].tsx b/pages/mapi-reference/[resource]/[[...slug]].tsx new file mode 100644 index 000000000..a4ef1e371 --- /dev/null +++ b/pages/mapi-reference/[resource]/[[...slug]].tsx @@ -0,0 +1,99 @@ +import { GetStaticPaths, GetStaticProps } from "next"; +import { + getSplitResourceData, + getSidebarData, + SplitResourceData, + SidebarData, + getAllApiReferencePaths, +} from "@/lib/openApiSpec"; +import { ApiReferenceLayout } from "@/components/api-reference"; +import { ResourceFullPage } from "@/components/api-reference"; +import { MAPI_REFERENCE_OVERVIEW_CONTENT } from "@/data/sidebars/mapiOverviewSidebar"; + +interface ResourcePageProps { + sidebarData: SidebarData; + resourceData: SplitResourceData; +} + +export default function ResourcePage({ + sidebarData, + 
resourceData, +}: ResourcePageProps) { + // Guard against undefined resourceData during client-side transitions + if (!resourceData) { + return null; + } + + const basePath = `/mapi-reference/${resourceData.resourceName}`; + const title = resourceData.resource.name || resourceData.resourceName; + const description = `Complete reference documentation for the ${title} resource.`; + + return ( + + + + ); +} + +export const getStaticPaths: GetStaticPaths = async () => { + // Generate all paths including deep links (methods, schemas, subresources) + // This ensures client-side navigation works properly since each path + // is an actual page with its own JSON data file. + const allPaths = await getAllApiReferencePaths("mapi"); + + const paths = allPaths.map((p) => ({ + params: { + resource: p.params.resource, + // slug is optional - undefined for resource root, array for deep paths + slug: p.params.slug, + }, + })); + + return { + paths, + fallback: false, + }; +}; + +export const getStaticProps: GetStaticProps = async ({ + params, +}) => { + // Extract resource name - the first segment determines which data to load + const resourceName = Array.isArray(params?.resource) + ? params.resource[0] + : params?.resource; + + if (!resourceName) { + return { notFound: true }; + } + + // All deep paths (methods, schemas, subresources) render the same resource page + // The page handles scrolling to the correct section based on the URL + const [sidebarData, resourceData] = await Promise.all([ + getSidebarData("mapi"), + getSplitResourceData("mapi", resourceName), + ]); + + if (!resourceData) { + return { notFound: true }; + } + + return { + props: { + sidebarData, + resourceData, + }, + revalidate: 3600, // Revalidate every hour + }; +}; diff --git a/pages/mapi-reference/index.tsx b/pages/mapi-reference/index.tsx index c381a6486..ae7925df4 100644 --- a/pages/mapi-reference/index.tsx +++ b/pages/mapi-reference/index.tsx @@ -1,44 +1,50 @@ import fs from "fs"; -import { MDXRemote } from "next-mdx-remote"; +import { GetStaticProps } from "next"; +import { MDXRemote, MDXRemoteSerializeResult } from "next-mdx-remote"; import rehypeMdxCodeProps from "rehype-mdx-code-props"; import { serialize } from "next-mdx-remote/serialize"; import remarkGfm from "remark-gfm"; +import { Box } from "@telegraph/layout"; -import { readOpenApiSpec, readStainlessSpec } from "../../lib/openApiSpec"; -import ApiReference from "../../components/ui/ApiReference/ApiReference"; -import { CONTENT_DIR } from "../../lib/content.server"; +import { getSidebarData, SidebarData } from "@/lib/openApiSpec"; +import { CONTENT_DIR } from "@/lib/content.server"; import { MDX_COMPONENTS } from "@/lib/mdxComponents"; -import { - MAPI_REFERENCE_OVERVIEW_CONTENT, - RESOURCE_ORDER, -} from "../../data/sidebars/mapiOverviewSidebar"; - -function ManagementApiReferenceNew({ - openApiSpec, - stainlessSpec, - preContentMdx, -}) { +import { ApiReferenceLayout } from "@/components/api-reference"; +import { MAPI_REFERENCE_OVERVIEW_CONTENT } from "@/data/sidebars/mapiOverviewSidebar"; + +interface MapiReferenceOverviewProps { + sidebarData: SidebarData; + overviewContentMdx: MDXRemoteSerializeResult; +} + +function MapiReferenceOverview({ + sidebarData, + overviewContentMdx, +}: MapiReferenceOverviewProps) { return ( - } - resourceOrder={RESOURCE_ORDER} + + title="Management API reference" + description="Complete reference documentation for the Knock Management API." 
+ > + + + + ); } -export async function getStaticProps() { - const openApiSpec = await readOpenApiSpec("mapi"); - const stainlessSpec = await readStainlessSpec("mapi"); +export const getStaticProps: GetStaticProps< + MapiReferenceOverviewProps +> = async () => { + const sidebarData = await getSidebarData("mapi"); - const preContent = fs.readFileSync( + const overviewContent = fs.readFileSync( `${CONTENT_DIR}/__mapi-reference/content.mdx`, ); - const preContentMdx = await serialize(preContent.toString(), { + const overviewContentMdx = await serialize(overviewContent.toString(), { parseFrontmatter: true, mdxOptions: { remarkPlugins: [remarkGfm], @@ -46,7 +52,13 @@ export async function getStaticProps() { }, }); - return { props: { openApiSpec, stainlessSpec, preContentMdx } }; -} + return { + props: { + sidebarData, + overviewContentMdx, + }, + revalidate: 3600, // Revalidate every hour + }; +}; -export default ManagementApiReferenceNew; +export default MapiReferenceOverview; diff --git a/pages/mapi-reference/overview/[[...section]].tsx b/pages/mapi-reference/overview/[[...section]].tsx new file mode 100644 index 000000000..68633486a --- /dev/null +++ b/pages/mapi-reference/overview/[[...section]].tsx @@ -0,0 +1,97 @@ +import fs from "fs"; +import { GetStaticPaths, GetStaticProps } from "next"; +import { MDXRemote, MDXRemoteSerializeResult } from "next-mdx-remote"; +import rehypeMdxCodeProps from "rehype-mdx-code-props"; +import { serialize } from "next-mdx-remote/serialize"; +import remarkGfm from "remark-gfm"; +import { Box } from "@telegraph/layout"; + +import { getSidebarData, SidebarData } from "@/lib/openApiSpec"; +import { CONTENT_DIR } from "@/lib/content.server"; +import { MDX_COMPONENTS } from "@/lib/mdxComponents"; +import { ApiReferenceLayout } from "@/components/api-reference"; +import { MAPI_REFERENCE_OVERVIEW_CONTENT } from "@/data/sidebars/mapiOverviewSidebar"; + +interface MapiReferenceOverviewProps { + sidebarData: SidebarData; + overviewContentMdx: MDXRemoteSerializeResult; +} + +function MapiReferenceOverview({ + sidebarData, + overviewContentMdx, +}: MapiReferenceOverviewProps) { + return ( + + + + + + ); +} + +export const getStaticPaths: GetStaticPaths = async () => { + // Generate paths for all overview sections from the sidebar config + const overviewPages = MAPI_REFERENCE_OVERVIEW_CONTENT[0]?.pages || []; + + const paths = [ + // Base overview path (no section) + { params: { section: [] } }, + // All section paths + ...overviewPages.map((page) => ({ + params: { + section: page.slug === "/" ? 
[] : [page.slug.replace(/^\//, "")], + }, + })), + ]; + + // Remove duplicates (the "/" slug creates a duplicate of the base path) + const uniquePaths = paths.filter( + (path, index, self) => + index === + self.findIndex( + (p) => + JSON.stringify(p.params.section) === + JSON.stringify(path.params.section), + ), + ); + + return { + paths: uniquePaths, + fallback: false, + }; +}; + +export const getStaticProps: GetStaticProps< + MapiReferenceOverviewProps +> = async () => { + const sidebarData = await getSidebarData("mapi"); + + const overviewContent = fs.readFileSync( + `${CONTENT_DIR}/__mapi-reference/content.mdx`, + ); + + const overviewContentMdx = await serialize(overviewContent.toString(), { + parseFrontmatter: true, + mdxOptions: { + remarkPlugins: [remarkGfm], + rehypePlugins: [rehypeMdxCodeProps], + }, + }); + + return { + props: { + sidebarData, + overviewContentMdx, + }, + revalidate: 3600, // Revalidate every hour + }; +}; + +export default MapiReferenceOverview; diff --git a/scripts/generateApiMarkdown.ts b/scripts/generateApiMarkdown.ts index 575a0100c..6c0dc8e73 100644 --- a/scripts/generateApiMarkdown.ts +++ b/scripts/generateApiMarkdown.ts @@ -31,6 +31,20 @@ async function parseFrontmatter(markdownContent) { return yaml.parse(yamlNode.value); } +// Ensure directory exists +function ensureDir(filePath: string) { + const dir = path.dirname(filePath); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } +} + +// Write markdown file with directory creation +function writeMarkdownFile(filePath: string, content: string) { + ensureDir(filePath); + fs.writeFileSync(filePath, content, "utf-8"); +} + // Clean JSX content to plain markdown function cleanJsxContent(content: string): string { return ( @@ -69,8 +83,19 @@ function cleanJsxContent(content: string): string { ) // Remove Table components (they use JSX) .replace(//g, "") - // Remove Attributes/Attribute components - .replace(/[\s\S]*?<\/Attributes>/g, "") + // Convert Attributes/Attribute components to markdown list + .replace(/([\s\S]*?)<\/Attributes>/g, (_, inner) => { + // Extract individual Attribute components and convert to list items + // Handles both single-line and multi-line formats + const attrRegex = + //g; + let result = ""; + let match; + while ((match = attrRegex.exec(inner)) !== null) { + result += `- **${match[1]}** (${match[2]}): ${match[3]}\n`; + } + return result || ""; + }) // Remove Endpoints/Endpoint components .replace(//g, "") // Remove MultiLangCodeBlock @@ -106,8 +131,10 @@ function parseSectionContent( const sections = new Map(); // Match Section components with their slug/path and content + // Use a more permissive pattern for attributes since titles may contain < and > characters + // We look for the keyAttribute value and then match until
const sectionRegex = new RegExp( - `]*${keyAttribute}="([^"]+)"[^>]*>([\\s\\S]*?)<\\/Section>`, + `([\\s\\S]*?)<\\/Section>`, "g", ); @@ -138,14 +165,20 @@ function loadOverviewContent(apiType: "api" | "mapi"): Map { return parseSectionContent(mdxContent); } -// Main function to generate a single API reference markdown file +// Main function to generate API reference markdown files +// Generates: 1) combined file, 2) per-resource files, 3) per-method files async function generateApiReferenceMarkdownFiles( apiType: "api" | "mapi" = "api", ) { try { - const fileName = + const baseDir = apiType === "api" ? "api-reference" : "mapi-reference"; + const combinedFileName = apiType === "api" ? "api-reference.md" : "mapi-reference.md"; - const filePath = path.join(process.cwd(), "public", fileName); + const combinedFilePath = path.join( + process.cwd(), + "public", + combinedFileName, + ); // Get API specs const openApiSpec = await readOpenApiSpec(apiType); @@ -163,14 +196,14 @@ async function generateApiReferenceMarkdownFiles( resourceOrder: MAPI_RESOURCE_ORDER, }; - let content = `# ${ + let combinedContent = `# ${ apiType === "api" ? "API" : "Management API" } Reference\n\n`; // Load overview content from section-based MDX file const sectionContent = loadOverviewContent(apiType); - // Add overview content sections + // Generate overview section pages for (const section of overviewContent) { if (section.pages) { for (const page of section.pages) { @@ -178,9 +211,23 @@ async function generateApiReferenceMarkdownFiles( const slug = page.slug === "/" ? "overview" : page.slug.slice(1); const pageContent = sectionContent.get(slug) || ""; - content += `## ${page.title}\n\n`; + // Add to combined file + combinedContent += `## ${page.title}\n\n`; + if (pageContent) { + combinedContent += pageContent + "\n\n"; + } + + // Generate individual overview page file if (pageContent) { - content += pageContent + "\n\n"; + const overviewPagePath = path.join( + process.cwd(), + "public", + baseDir, + "overview", + `${slug}.md`, + ); + const overviewPageContent = `# ${page.title}\n\n${pageContent}\n`; + writeMarkdownFile(overviewPagePath, overviewPageContent); } } } @@ -189,60 +236,240 @@ async function generateApiReferenceMarkdownFiles( // Add resource content for (const resourceName of resourceOrder) { const resource = stainlessSpec.resources[resourceName]; + if (!resource) continue; + + // Generate per-resource combined file + let resourceContent = ""; // Add resource overview - content += getResourceOverviewContent( + const resourceOverview = getResourceOverviewContent( resource, resourceName, openApiSpec, ); + combinedContent += resourceOverview; + resourceContent += resourceOverview; // Add method content if (resource.methods) { for (const [methodName, method] of Object.entries(resource.methods)) { - content += getMethodMarkdownContent(methodName, method, openApiSpec); + const methodContent = getMethodMarkdownContent( + methodName, + method, + openApiSpec, + ); + combinedContent += methodContent; + resourceContent += methodContent; + + // Generate individual method page + if (methodContent.trim()) { + const methodPagePath = path.join( + process.cwd(), + "public", + baseDir, + resourceName, + `${methodName}.md`, + ); + writeMarkdownFile(methodPagePath, methodContent); + } } } - // Add subresource content + // Add subresource content (with nested path support) if (resource.subresources) { for (const [subresourceName, subresource] of Object.entries( resource.subresources, )) { - content += 
getSubresourceMarkdownContent( + const subresourceContent = getSubresourceMarkdownContent( subresourceName, subresource, openApiSpec, ); + combinedContent += subresourceContent; + resourceContent += subresourceContent; + + // Generate individual subresource and method pages + generateSubresourcePages( + baseDir, + resourceName, + [subresourceName], + subresource, + openApiSpec, + ); } } // Add schema content if (resource.models) { for (const [modelName, modelRef] of Object.entries(resource.models)) { - content += getSchemaMarkdownContent( + const schemaContent = getSchemaMarkdownContent( modelName, modelRef as string, openApiSpec, ); + combinedContent += schemaContent; + resourceContent += schemaContent; + + // Generate individual schema page + if (schemaContent.trim()) { + const schemaPagePath = path.join( + process.cwd(), + "public", + baseDir, + resourceName, + "schemas", + `${modelName}.md`, + ); + writeMarkdownFile(schemaPagePath, schemaContent); + } } } + + // Write the per-resource combined file + const resourceFilePath = path.join( + process.cwd(), + "public", + baseDir, + `${resourceName}.md`, + ); + writeMarkdownFile(resourceFilePath, resourceContent); } - fs.writeFileSync(filePath, content, "utf-8"); + // Write the combined file + fs.writeFileSync(combinedFilePath, combinedContent, "utf-8"); console.log( - `✅ ${apiType.toUpperCase()} reference markdown file generated successfully`, + `✅ ${apiType.toUpperCase()} reference markdown files generated successfully`, ); } catch (error) { console.error( - `Error generating ${apiType.toUpperCase()} reference markdown file:`, + `Error generating ${apiType.toUpperCase()} reference markdown files:`, error, ); throw error; } } +// Generate pages for subresources recursively +function generateSubresourcePages( + baseDir: string, + resourceName: string, + subresourcePath: string[], + subresource: any, + openApiSpec: any, +) { + // Build the directory path for this subresource + const subresourceDir = path.join( + process.cwd(), + "public", + baseDir, + resourceName, + ...subresourcePath, + ); + + // Generate index.md for the subresource overview + let indexContent = `# ${ + subresource.name || subresourcePath[subresourcePath.length - 1] + }\n\n`; + + if (subresource.description) { + indexContent += `${subresource.description}\n\n`; + } + + // Add list of available methods + if (subresource.methods && Object.keys(subresource.methods).length > 0) { + indexContent += `## Available endpoints\n\n`; + for (const [methodName, method] of Object.entries(subresource.methods)) { + const [methodType, endpoint] = resolveEndpointFromMethod( + method as string | { endpoint: string }, + ); + const openApiOperation = openApiSpec.paths?.[endpoint]?.[methodType]; + const summary = openApiOperation?.summary || methodName; + indexContent += `- **${methodType.toUpperCase()}** \`${endpoint}\` - ${summary}\n`; + } + indexContent += "\n"; + } + + // Add list of schemas if any + if (subresource.models && Object.keys(subresource.models).length > 0) { + indexContent += `## Object definitions\n\n`; + for (const [modelName, modelRef] of Object.entries(subresource.models)) { + const schema = JSONPointer.get( + openApiSpec, + (modelRef as string).replace("#", ""), + ); + const title = schema?.title || modelName; + indexContent += `- [${title}](./schemas/${modelName}.md)\n`; + } + indexContent += "\n"; + } + + const indexPagePath = path.join(subresourceDir, "index.md"); + writeMarkdownFile(indexPagePath, indexContent); + + // Generate method pages for this subresource + if 
(subresource.methods) { + for (const [methodName, method] of Object.entries(subresource.methods)) { + const [methodType, endpoint] = resolveEndpointFromMethod( + method as string | { endpoint: string }, + ); + const openApiOperation = openApiSpec.paths?.[endpoint]?.[methodType]; + + if (openApiOperation) { + let methodContent = `### ${openApiOperation.summary || methodName}\n\n`; + + if (openApiOperation.description) { + methodContent += `${openApiOperation.description}\n\n`; + } + + methodContent += `**Endpoint:** \`${methodType.toUpperCase()} ${endpoint}\`\n\n`; + + if (openApiOperation["x-ratelimit-tier"]) { + methodContent += `**Rate limit tier:** ${openApiOperation["x-ratelimit-tier"]}\n\n`; + } + + const methodPagePath = path.join(subresourceDir, `${methodName}.md`); + writeMarkdownFile(methodPagePath, methodContent); + } + } + } + + // Generate schema pages for this subresource + if (subresource.models) { + for (const [modelName, modelRef] of Object.entries(subresource.models)) { + const schemaContent = getSchemaMarkdownContent( + modelName, + modelRef as string, + openApiSpec, + ); + + if (schemaContent.trim()) { + const schemaPagePath = path.join( + subresourceDir, + "schemas", + `${modelName}.md`, + ); + writeMarkdownFile(schemaPagePath, schemaContent); + } + } + } + + // Recursively process nested subresources + if (subresource.subresources) { + for (const [nestedName, nestedSubresource] of Object.entries( + subresource.subresources, + )) { + generateSubresourcePages( + baseDir, + resourceName, + [...subresourcePath, nestedName], + nestedSubresource, + openApiSpec, + ); + } + } +} + // Function to generate both API and MAPI reference files async function generateAllApiReferenceMarkdownFiles() { await generateApiReferenceMarkdownFiles("api"); @@ -522,57 +749,109 @@ function getSchemaMarkdownContent( return content; } -// Generate CLI reference markdown file -async function generateCliReferenceMarkdownFile() { - try { - const fileName = "cli.md"; - const filePath = path.join(process.cwd(), "public", fileName); +// Load CLI content from individual MDX files in content/cli/ +function loadCliSectionContent(resourceName: string): Map { + const contentPath = path.join( + process.cwd(), + "content", + "cli", + `${resourceName}.mdx`, + ); - // Load CLI content from MDX file - const contentPath = path.join( - process.cwd(), - "content", - "__cli", - "content.mdx", - ); + if (!fs.existsSync(contentPath)) { + return new Map(); + } - if (!fs.existsSync(contentPath)) { - console.warn("⚠️ CLI content file not found, skipping CLI reference"); - return; - } + const mdxContent = fs.readFileSync(contentPath, "utf-8"); + return parseSectionContent(mdxContent, "path"); +} - const mdxContent = fs.readFileSync(contentPath, "utf-8"); - const sectionContent = parseSectionContent(mdxContent, "path"); +// Generate CLI reference markdown files +// Generates: 1) combined file, 2) per-resource files, 3) per-method files +async function generateCliReferenceMarkdownFile() { + try { + const combinedFilePath = path.join(process.cwd(), "public", "cli.md"); + const baseDir = "cli"; - let content = `# CLI Reference\n\n`; + let combinedContent = `# CLI Reference\n\n`; // Iterate through CLI sidebar to maintain order for (const section of CLI_SIDEBAR) { + // Extract resource name from section slug (e.g., "/cli/overview" -> "overview") + const resourceMatch = section.slug.match(/^\/cli\/(.+)$/); + if (!resourceMatch) continue; + + const resourceName = resourceMatch[1]; + + // Load content for this resource from its 
MDX file + const sectionContent = loadCliSectionContent(resourceName); + + let resourceContent = ""; + if (section.title) { - content += `## ${section.title}\n\n`; + combinedContent += `## ${section.title}\n\n`; + resourceContent += `# ${section.title}\n\n`; } if (section.pages) { for (const page of section.pages) { // Build the full path from section slug + page slug - const fullPath = `${section.slug.replace("/cli", "")}${page.slug}`; + // e.g., "/overview" + "/installation" -> "/overview/installation" + // For root pages (slug="/"), the path is just the resource path (e.g., "/overview") + const basePath = section.slug.replace("/cli", ""); + const fullPath = + page.slug === "/" ? basePath : `${basePath}${page.slug}`; const pageContent = sectionContent.get(fullPath) || ""; if (page.title) { - content += `### ${page.title}\n\n`; + combinedContent += `### ${page.title}\n\n`; + resourceContent += `## ${page.title}\n\n`; } if (pageContent) { - content += pageContent + "\n\n"; + combinedContent += pageContent + "\n\n"; + resourceContent += pageContent + "\n\n"; + + // Generate individual method/page file + // Handle root page (slug = "/") specially + const pageSlug = + page.slug === "/" ? "index" : page.slug.replace(/^\//, ""); + const pageFilePath = path.join( + process.cwd(), + "public", + baseDir, + resourceName, + `${pageSlug}.md`, + ); + + let individualPageContent = ""; + if (page.title) { + individualPageContent += `# ${page.title}\n\n`; + } + individualPageContent += pageContent + "\n"; + + writeMarkdownFile(pageFilePath, individualPageContent); } } } + + // Write the per-resource combined file + if (resourceContent.trim()) { + const resourceFilePath = path.join( + process.cwd(), + "public", + baseDir, + `${resourceName}.md`, + ); + writeMarkdownFile(resourceFilePath, resourceContent); + } } - fs.writeFileSync(filePath, content, "utf-8"); - console.log("✅ CLI reference markdown file generated successfully"); + // Write the combined file + fs.writeFileSync(combinedFilePath, combinedContent, "utf-8"); + console.log("✅ CLI reference markdown files generated successfully"); } catch (error) { - console.error("Error generating CLI reference markdown file:", error); + console.error("Error generating CLI reference markdown files:", error); throw error; } } diff --git a/scripts/splitOpenApiSpec.ts b/scripts/splitOpenApiSpec.ts new file mode 100644 index 000000000..a7a345c24 --- /dev/null +++ b/scripts/splitOpenApiSpec.ts @@ -0,0 +1,443 @@ +/** + * Build-time script that splits the OpenAPI spec into per-resource JSON files. + * This reduces the data sent to each resource page from ~19k lines to just what's needed. 
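 *
 * Rough sketch of one emitted file's shape (the resource name and values below are
 * illustrative, not taken from the real spec):
 *
 * @example
 * // data/specs/api/resources/users.json
 * {
 *   "resourceName": "users",
 *   "resource": { "name": "Users", "methods": { "list": "get /v1/users" } },
 *   "paths": { "/v1/users": { "get": { "summary": "..." } } },
 *   "schemas": { "User": { "title": "User", "type": "object" } },
 *   "schemaReferences": { "User": "/api-reference/users/schemas/user" },
 *   "baseUrl": "https://api.example.com"
 * }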
+ * + * Run with: yarn split-specs + */ + +import { dereference } from "@scalar/openapi-parser"; +import { OpenAPIV3 } from "@scalar/openapi-types"; +import deepmerge from "deepmerge"; +import { readFile, writeFile, mkdir } from "fs/promises"; +import JSONPointer from "jsonpointer"; +import safeStringify from "safe-stringify"; +import { parse } from "yaml"; + +// ============================================================================ +// Types +// ============================================================================ + +type StainlessResourceMethod = + | string + | { + type: "http"; + endpoint: string; + positional_params?: string[]; + }; + +type StainlessResource = { + name?: string; + description?: string; + models?: Record; + methods?: Record; + subresources?: Record; +}; + +interface StainlessConfig { + resources: { + [key: string]: StainlessResource; + }; + environments: Record; +} + +type SpecName = "api" | "mapi"; + +/** + * Data structure for a split resource file. + * Contains only the data needed to render a single resource page. + */ +export type SplitResourceData = { + resourceName: string; + resource: StainlessResource; + paths: Record; + schemas: Record; + schemaReferences: Record; + baseUrl: string; +}; + +// ============================================================================ +// Helpers +// ============================================================================ + +function yamlToJson(yaml: string) { + return parse(yaml); +} + +/** + * Resolve endpoint configuration to [methodType, endpoint] tuple. + */ +function resolveEndpoint( + methodConfig: StainlessResourceMethod, +): [string, string] { + const endpointString = + typeof methodConfig === "string" ? methodConfig : methodConfig.endpoint; + const [methodType, endpoint] = endpointString.split(" "); + return [methodType.toLowerCase(), endpoint]; +} + +// ============================================================================ +// Spec Loading +// ============================================================================ + +async function readOpenApiSpec(specName: string): Promise { + const spec = await readFile(`./data/specs/${specName}/openapi.yml`, "utf8"); + const jsonSpec = yamlToJson(spec); + const { schema } = await dereference(jsonSpec); + return JSON.parse(safeStringify(schema)) as OpenAPIV3.Document; +} + +async function readSpecCustomizations(specName: string) { + const spec = await readFile( + `./data/specs/${specName}/customizations.yml`, + "utf8", + ); + return parse(spec); +} + +async function readStainlessSpec(specName: string): Promise { + const customizations = await readSpecCustomizations(specName); + const spec = await readFile(`./data/specs/${specName}/stainless.yml`, "utf8"); + const stainlessSpec = parse(spec); + return deepmerge(stainlessSpec, customizations) as StainlessConfig; +} + +// ============================================================================ +// Path Extraction +// ============================================================================ + +/** + * Extract all endpoints used by a resource and its subresources. 
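 *
 * Only the HTTP verbs the resource's methods actually reference are copied, so a
 * shared path does not drag in unrelated operations. A minimal sketch (the endpoint
 * strings below are assumptions, not real spec entries):
 *
 * @example
 * const paths = extractPathsForResource(
 *   { methods: { get: "get /v1/users/{id}", delete: "delete /v1/users/{id}" } },
 *   openApiSpec,
 * );
 * // paths["/v1/users/{id}"] now holds only the "get" and "delete" operations,
 * // plus any path-level parameters defined on that path item.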
+ */ +function extractPathsForResource( + resource: StainlessResource, + openApiSpec: OpenAPIV3.Document, +): Record { + const paths: Record = {}; + + // Extract paths from methods + if (resource.methods) { + for (const methodConfig of Object.values(resource.methods)) { + const [methodType, endpoint] = resolveEndpoint(methodConfig); + const pathItem = openApiSpec.paths?.[endpoint]; + + if (pathItem) { + // Only include the specific method we need, not the entire path object + if (!paths[endpoint]) { + paths[endpoint] = {}; + } + const method = methodType as keyof OpenAPIV3.PathItemObject; + if (pathItem[method]) { + (paths[endpoint] as Record)[method] = + pathItem[method]; + } + // Also include path-level parameters if they exist + if (pathItem.parameters) { + paths[endpoint].parameters = pathItem.parameters; + } + } + } + } + + // Recursively extract paths from subresources + if (resource.subresources) { + for (const subresource of Object.values(resource.subresources)) { + Object.assign(paths, extractPathsForResource(subresource, openApiSpec)); + } + } + + return paths; +} + +// ============================================================================ +// Schema Extraction (with dependency resolution) +// ============================================================================ + +/** + * Extract a schema name from a $ref string. + * Example: "#/components/schemas/User" -> "User" + */ +function getSchemaNameFromRef(ref: string): string | null { + const match = ref.match(/^#\/components\/schemas\/(.+)$/); + return match ? match[1] : null; +} + +/** + * Recursively find all schema references within a schema object. + * This handles nested objects, arrays, allOf, oneOf, anyOf, etc. + */ +function findSchemaReferences(obj: unknown, refs: Set): void { + if (!obj || typeof obj !== "object") { + return; + } + + if (Array.isArray(obj)) { + for (const item of obj) { + findSchemaReferences(item, refs); + } + return; + } + + const record = obj as Record; + + // Check for $ref + if (typeof record.$ref === "string") { + refs.add(record.$ref); + } + + // Recurse into all properties + for (const value of Object.values(record)) { + findSchemaReferences(value, refs); + } +} + +/** + * Extract schemas referenced by a resource and all their dependencies. + * Uses recursive resolution to include nested schema references. 
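 *
 * A minimal usage sketch (the "users" resource and schema names are assumed for
 * illustration; real names come from the Stainless and OpenAPI specs):
 *
 * @example
 * const schemas = extractSchemasForResource(
 *   stainlessSpec.resources.users,
 *   openApiSpec,
 * );
 * // If the User schema $refs PreferenceSet, both are returned:
 * // Map { "User" => { ... }, "PreferenceSet" => { ... } }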
+ */ +function extractSchemasForResource( + resource: StainlessResource, + openApiSpec: OpenAPIV3.Document, + collectedSchemas: Map = new Map(), + visited: Set = new Set(), +): Map { + // First, collect schemas directly referenced in models + if (resource.models) { + for (const modelRef of Object.values(resource.models)) { + extractSchemaWithDependencies( + modelRef, + openApiSpec, + collectedSchemas, + visited, + ); + } + } + + // Also extract schemas referenced in operation bodies and responses + if (resource.methods) { + for (const methodConfig of Object.values(resource.methods)) { + const [methodType, endpoint] = resolveEndpoint(methodConfig); + const operation = openApiSpec.paths?.[endpoint]?.[ + methodType as keyof OpenAPIV3.PathItemObject + ] as OpenAPIV3.OperationObject | undefined; + + if (operation) { + // Find all refs in the operation + const refs = new Set(); + findSchemaReferences(operation, refs); + + for (const ref of refs) { + extractSchemaWithDependencies( + ref, + openApiSpec, + collectedSchemas, + visited, + ); + } + } + } + } + + // Recursively process subresources + if (resource.subresources) { + for (const subresource of Object.values(resource.subresources)) { + extractSchemasForResource( + subresource, + openApiSpec, + collectedSchemas, + visited, + ); + } + } + + return collectedSchemas; +} + +/** + * Extract a single schema and all its dependencies recursively. + */ +function extractSchemaWithDependencies( + schemaRef: string, + openApiSpec: OpenAPIV3.Document, + collectedSchemas: Map, + visited: Set, +): void { + if (visited.has(schemaRef)) { + return; + } + visited.add(schemaRef); + + // Get the schema + const schema = JSONPointer.get(openApiSpec, schemaRef.replace("#", "")) as + | OpenAPIV3.SchemaObject + | undefined; + + if (!schema) { + return; + } + + // Extract schema name from ref + const schemaName = getSchemaNameFromRef(schemaRef); + if (schemaName) { + collectedSchemas.set(schemaName, schema); + } + + // Find all nested references in this schema + const nestedRefs = new Set(); + findSchemaReferences(schema, nestedRefs); + + // Recursively extract each referenced schema + for (const ref of nestedRefs) { + extractSchemaWithDependencies(ref, openApiSpec, collectedSchemas, visited); + } +} + +// ============================================================================ +// Schema References Builder +// ============================================================================ + +/** + * Build schema references map for a resource (used for cross-linking). + */ +function buildSchemaReferencesForResource( + resource: StainlessResource, + openApiSpec: OpenAPIV3.Document, + basePath: string, +): Record { + const schemaReferences: Record = {}; + + if (resource.models) { + for (const [modelName, modelRef] of Object.entries(resource.models)) { + const schema = JSONPointer.get(openApiSpec, modelRef.replace("#", "")) as + | OpenAPIV3.SchemaObject + | undefined; + + const title = schema?.title ?? 
modelName; + + if (schema) { + schemaReferences[title] = `${basePath}/schemas/${modelName}`; + schemaReferences[`${title}[]`] = `${basePath}/schemas/${modelName}`; + } + } + } + + if (resource.subresources) { + for (const [subresourceName, subresource] of Object.entries( + resource.subresources, + )) { + Object.assign( + schemaReferences, + buildSchemaReferencesForResource( + subresource, + openApiSpec, + `${basePath}/${subresourceName}`, + ), + ); + } + } + + return schemaReferences; +} + +/** + * Build complete schema references for all resources (for cross-resource linking). + */ +function buildAllSchemaReferences( + stainlessSpec: StainlessConfig, + openApiSpec: OpenAPIV3.Document, + basePath: string, +): Record { + const schemaReferences: Record = {}; + + for (const [resourceName, resource] of Object.entries( + stainlessSpec.resources, + )) { + Object.assign( + schemaReferences, + buildSchemaReferencesForResource( + resource, + openApiSpec, + `${basePath}/${resourceName}`, + ), + ); + } + + return schemaReferences; +} + +// ============================================================================ +// Main Split Function +// ============================================================================ + +async function splitSpec(specName: SpecName): Promise { + console.log(`Splitting ${specName} spec...`); + + const [openApiSpec, stainlessSpec] = await Promise.all([ + readOpenApiSpec(specName), + readStainlessSpec(specName), + ]); + + const basePath = specName === "api" ? "/api-reference" : "/mapi-reference"; + const baseUrl = stainlessSpec.environments.production; + + // Build complete schema references for cross-resource linking + const allSchemaReferences = buildAllSchemaReferences( + stainlessSpec, + openApiSpec, + basePath, + ); + + // Create output directory + const outputDir = `./data/specs/${specName}/resources`; + await mkdir(outputDir, { recursive: true }); + + // Process each resource + for (const [resourceName, resource] of Object.entries( + stainlessSpec.resources, + )) { + console.log(` Processing ${resourceName}...`); + + // Extract paths for this resource + const paths = extractPathsForResource(resource, openApiSpec); + + // Extract schemas with dependencies + const schemasMap = extractSchemasForResource(resource, openApiSpec); + const schemas: Record = {}; + for (const [name, schema] of schemasMap) { + schemas[name] = schema; + } + + // Build the split resource data + const splitData: SplitResourceData = { + resourceName, + resource, + paths, + schemas, + schemaReferences: allSchemaReferences, + baseUrl, + }; + + // Write to file + const outputPath = `${outputDir}/${resourceName}.json`; + await writeFile(outputPath, JSON.stringify(splitData, null, 2)); + console.log(` -> ${outputPath}`); + } + + console.log(`Done splitting ${specName} spec.`); +} + +// ============================================================================ +// Entry Point +// ============================================================================ + +async function main() { + console.log("Splitting OpenAPI specs by resource...\n"); + + await Promise.all([splitSpec("api"), splitSpec("mapi")]); + + console.log("\nAll specs split successfully."); +} + +main().catch((error) => { + console.error("Error splitting specs:", error); + process.exit(1); +}); diff --git a/styles/global.css b/styles/global.css index 8c3acc9af..f67eabdff 100644 --- a/styles/global.css +++ b/styles/global.css @@ -12,7 +12,10 @@ color: var(--tgph-gray-12) !important; } -.tgraph-content h1, .tgraph-content h2, 
.tgraph-content h3, .tgraph-content h4 {
+.tgraph-content h1,
+.tgraph-content h2,
+.tgraph-content h3,
+.tgraph-content h4 {
   color: var(--tgph-gray-12) !important;
 }
 
@@ -26,6 +29,15 @@
   margin-bottom: var(--tgph-spacing-4);
 }
 
+.tgraph-content ul,
+.tgraph-content ol {
+  margin-left: var(--tgph-spacing-4);
+  list-style-type: disc;
+  display: flex;
+  flex-direction: column;
+  gap: var(--tgph-spacing-2);
+}
+
 .tgraph-content > div,
 .tgraph-content > img,
 .tgraph-content > blockquote {