diff --git a/mint.json b/mint.json index 24cf4be1..5583dc61 100644 --- a/mint.json +++ b/mint.json @@ -91,6 +91,10 @@ "modus/error-handling" ] }, + { + "group": "How-To Guides", + "pages": ["modus/deepseek-model"] + }, { "group": "API Reference", "pages": [ diff --git a/modus/deepseek-model.mdx b/modus/deepseek-model.mdx new file mode 100644 index 00000000..e0de344a --- /dev/null +++ b/modus/deepseek-model.mdx @@ -0,0 +1,67 @@ +--- +title: Using DeepSeek +description: "Use the DeepSeek models with your Modus app" +mode: "wide" +"og:title": "Using DeepSeek - Modus" +--- + +DeepSeek is an AI lab that has developed and released a series of open source +LLMs that are notable for both their performance and cost-efficiency. By using a +Mixture-of-Experts (MoE) system that utilizes only 37 billion of the models' 671 +billion parameters for any task, the DeepSeek-R1 model is able to achieve best +in class performance at a fraction of the cost of inference on other comparable +models. In this guide we review how to leverage the DeepSeek models using Modus. + +## Options for using DeepSeek with Modus + +There are two options for invoking DeepSeek models in your Modus app: + +1. [Use the distilled DeepSeek model hosted by Hypermode](#using-the-distilled-deepseek-model-hosted-by-hypermode) + Hypermode hosts and makes available the distilled DeepSeek model which can be + used by Modus apps developed locally and deployed to Hypermode +2. 
[Use the DeepSeek API with your Modus app](#using-the-deepseek-api-with-modus) + Access DeepSeek models hosted on the DeepSeek platform by configuring a + DeepSeek connection in your Modus app and using your DeepSeek API key + +## Using the distilled DeepSeek model hosted by Hypermode + +A distilled model is a smaller model trained to reproduce the behavior of a larger model, retaining much of its capability at a lower inference cost. + +```json modus.json +"deepseek-r1-distill": { + "sourceModel": "deepseek-ai/DeepSeek-R1-Distill-Llama-8B", + "provider": "hugging-face", + "connection": "hypermode" +} +``` + +## Using the DeepSeek API with Modus + +```json modus.json +{ + "$schema": "https://schema.hypermode.com/modus.json", + "endpoints": { + "default": { + "type": "graphql", + "path": "/graphql", + "auth": "bearer-token" + } + }, + "models": { + "deepseek-chat": { + "sourceModel": "deepseek-chat", + "connection": "deepseek", + "path": "v1/chat/completions" + } + }, + "connections": { + "deepseek": { + "type": "http", + "baseUrl": "https://api.deepseek.com/", + "headers": { + "Authorization": "Bearer {{API_TOKEN}}" + } + } + } +} +```