From 4d2171c06355b41f26117fb9a511d39fa0634fdd Mon Sep 17 00:00:00 2001 From: straydragon Date: Thu, 3 Jul 2025 23:14:03 +0800 Subject: [PATCH 1/6] submodule: update to latest --- libs/dify | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/dify b/libs/dify index cea6522..a79f37b 160000 --- a/libs/dify +++ b/libs/dify @@ -1 +1 @@ -Subproject commit cea6522122e63b51aedbabe0e000f1efc72faa8b +Subproject commit a79f37b686ca4a487df4311f413cc90e52e7af44 From b01d8d6dfa51251ef0c24310431ee04ef0da91f8 Mon Sep 17 00:00:00 2001 From: straydragon Date: Thu, 3 Jul 2025 23:41:16 +0800 Subject: [PATCH 2/6] gen: diff --- .../1.5.0__1.5.1.diff | 135 ++++++++++++++++++ 1 file changed, 135 insertions(+) create mode 100644 misc/official_api_doc_changes/1.5.0__1.5.1.diff diff --git a/misc/official_api_doc_changes/1.5.0__1.5.1.diff b/misc/official_api_doc_changes/1.5.0__1.5.1.diff new file mode 100644 index 0000000..715fcb3 --- /dev/null +++ b/misc/official_api_doc_changes/1.5.0__1.5.1.diff @@ -0,0 +1,135 @@ +diff --git a/web/app/(commonLayout)/datasets/template/template.zh.mdx b/web/app/(commonLayout)/datasets/template/template.zh.mdx +index d407fad3c..c21ce3bf5 100644 +--- a/web/app/(commonLayout)/datasets/template/template.zh.mdx ++++ b/web/app/(commonLayout)/datasets/template/template.zh.mdx +@@ -1131,6 +1131,130 @@ import { Row, Col, Properties, Property, Heading, SubProperty, PropertyInstructi + +
+ ++ ++ ++ ++ 获取文档详情. ++ ### Path ++ - `dataset_id` (string) 知识库 ID ++ - `document_id` (string) 文档 ID ++ ++ ### Query ++ - `metadata` (string) metadata 过滤条件 `all`, `only`, 或者 `without`. 默认是 `all`. ++ ++ ### Response ++ 返回知识库文档的详情. ++ ++ ++ ### Request Example ++ ++ ```bash {{ title: 'cURL' }} ++ curl -X GET '${props.apiBaseUrl}/datasets/{dataset_id}/documents/{document_id}' \ ++ -H 'Authorization: Bearer {api_key}' ++ ``` ++ ++ ++ ### Response Example ++ ++ ```json {{ title: 'Response' }} ++ { ++ "id": "f46ae30c-5c11-471b-96d0-464f5f32a7b2", ++ "position": 1, ++ "data_source_type": "upload_file", ++ "data_source_info": { ++ "upload_file": { ++ ... ++ } ++ }, ++ "dataset_process_rule_id": "24b99906-845e-499f-9e3c-d5565dd6962c", ++ "dataset_process_rule": { ++ "mode": "hierarchical", ++ "rules": { ++ "pre_processing_rules": [ ++ { ++ "id": "remove_extra_spaces", ++ "enabled": true ++ }, ++ { ++ "id": "remove_urls_emails", ++ "enabled": false ++ } ++ ], ++ "segmentation": { ++ "separator": "**********page_ending**********", ++ "max_tokens": 1024, ++ "chunk_overlap": 0 ++ }, ++ "parent_mode": "paragraph", ++ "subchunk_segmentation": { ++ "separator": "\n", ++ "max_tokens": 512, ++ "chunk_overlap": 0 ++ } ++ } ++ }, ++ "document_process_rule": { ++ "id": "24b99906-845e-499f-9e3c-d5565dd6962c", ++ "dataset_id": "48a0db76-d1a9-46c1-ae35-2baaa919a8a9", ++ "mode": "hierarchical", ++ "rules": { ++ "pre_processing_rules": [ ++ { ++ "id": "remove_extra_spaces", ++ "enabled": true ++ }, ++ { ++ "id": "remove_urls_emails", ++ "enabled": false ++ } ++ ], ++ "segmentation": { ++ "separator": "**********page_ending**********", ++ "max_tokens": 1024, ++ "chunk_overlap": 0 ++ }, ++ "parent_mode": "paragraph", ++ "subchunk_segmentation": { ++ "separator": "\n", ++ "max_tokens": 512, ++ "chunk_overlap": 0 ++ } ++ } ++ }, ++ "name": "xxxx", ++ "created_from": "web", ++ "created_by": "17f71940-a7b5-4c77-b60f-2bd645c1ffa0", ++ "created_at": 1750464191, ++ "tokens": null, ++ "indexing_status": "waiting", ++ "completed_at": null, ++ "updated_at": 1750464191, ++ "indexing_latency": null, ++ "error": null, ++ "enabled": true, ++ "disabled_at": null, ++ "disabled_by": null, ++ "archived": false, ++ "segment_count": 0, ++ "average_segment_length": 0, ++ "hit_count": null, ++ "display_status": "queuing", ++ "doc_form": "hierarchical_model", ++ "doc_language": "Chinese Simplified" ++ } ++ ``` ++ ++ ++ ++___ ++
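The cURL example above maps directly onto this repo's Python tooling. Below is a minimal sketch using `httpx` (already a dependency here); it is not part of the diff itself, the `DIFY_KB_API_KEY` variable name is an assumption, and the two IDs are placeholders copied from the sample response:

```python
import os

import httpx

BASE_URL = "https://api.dify.ai/v1"
API_KEY = os.environ["DIFY_KB_API_KEY"]  # assumed env var name; any knowledge-base API key works


def get_document_detail(dataset_id: str, document_id: str, metadata: str = "all") -> dict:
    """GET /datasets/{dataset_id}/documents/{document_id}; `metadata` is `all`, `only`, or `without`."""
    resp = httpx.get(
        f"{BASE_URL}/datasets/{dataset_id}/documents/{document_id}",
        headers={"Authorization": f"Bearer {API_KEY}"},
        params={"metadata": metadata},
    )
    resp.raise_for_status()
    return resp.json()


# Placeholder IDs taken from the sample response above.
detail = get_document_detail(
    "48a0db76-d1a9-46c1-ae35-2baaa919a8a9",
    "f46ae30c-5c11-471b-96d0-464f5f32a7b2",
)
print(detail["indexing_status"], detail["doc_form"])
```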
++ ++ + Date: Thu, 3 Jul 2025 23:42:02 +0800 Subject: [PATCH 3/6] feat: 1.5.1 | kb schema update --- schema/knowledge_base.en.yaml | 247 ++++++++++++++++++ schema/knowledge_base.zh.yaml | 246 +++++++++++++++++ .../overlays/knowledge_base.en.overlay.yaml | 201 ++++++++++++++ 3 files changed, 694 insertions(+) diff --git a/schema/knowledge_base.en.yaml b/schema/knowledge_base.en.yaml index 71237b6..cbb2041 100644 --- a/schema/knowledge_base.en.yaml +++ b/schema/knowledge_base.en.yaml @@ -484,6 +484,205 @@ components: hit_count: type: integer description: Hit count + DocumentDetail: + type: object + properties: + id: + type: string + description: Document ID + position: + type: integer + description: Document position + data_source_type: + type: string + description: Data source type + data_source_info: + type: object + description: Data source information + properties: + upload_file: + type: object + description: Upload file information + dataset_process_rule_id: + type: string + description: Dataset process rule ID + dataset_process_rule: + type: object + description: Dataset process rule + properties: + mode: + type: string + description: Process mode + rules: + type: object + description: Rule details + properties: + pre_processing_rules: + type: array + description: Pre-processing rules list + items: + type: object + properties: + id: + type: string + description: Rule ID + enabled: + type: boolean + description: Whether enabled + segmentation: + type: object + description: Segmentation rules + properties: + separator: + type: string + description: Separator + max_tokens: + type: integer + description: Maximum tokens + chunk_overlap: + type: integer + description: Chunk overlap + parent_mode: + type: string + description: Parent mode + subchunk_segmentation: + type: object + description: Sub-chunk segmentation + properties: + separator: + type: string + description: Separator + max_tokens: + type: integer + description: Maximum tokens + chunk_overlap: + type: integer + description: Chunk overlap + document_process_rule: + type: object + description: Document process rule + properties: + id: + type: string + description: Rule ID + dataset_id: + type: string + description: Dataset ID + mode: + type: string + description: Process mode + rules: + type: object + description: Rule details + properties: + pre_processing_rules: + type: array + description: Pre-processing rules list + items: + type: object + properties: + id: + type: string + description: Rule ID + enabled: + type: boolean + description: Whether enabled + segmentation: + type: object + description: Segmentation rules + properties: + separator: + type: string + description: Separator + max_tokens: + type: integer + description: Maximum tokens + chunk_overlap: + type: integer + description: Chunk overlap + parent_mode: + type: string + description: Parent mode + subchunk_segmentation: + type: object + description: Sub-chunk segmentation + properties: + separator: + type: string + description: Separator + max_tokens: + type: integer + description: Maximum tokens + chunk_overlap: + type: integer + description: Chunk overlap + name: + type: string + description: Document name + created_from: + type: string + description: Creation source + created_by: + type: string + description: Creator + created_at: + type: number + description: Creation timestamp + tokens: + type: integer + nullable: true + description: Token count + indexing_status: + type: string + description: Indexing status + completed_at: + type: number + nullable: true + 
description: Completion timestamp + updated_at: + type: number + description: Update timestamp + indexing_latency: + type: number + nullable: true + description: Indexing latency + error: + type: string + nullable: true + description: Error message + enabled: + type: boolean + description: Whether enabled + disabled_at: + type: number + nullable: true + description: Disabled timestamp + disabled_by: + type: string + nullable: true + description: Disabled by + archived: + type: boolean + description: Whether archived + segment_count: + type: integer + description: Segment count + average_segment_length: + type: number + description: Average segment length + hit_count: + type: integer + nullable: true + description: Hit count + display_status: + type: string + description: Display status + doc_form: + type: string + description: Document form + doc_language: + type: string + description: Document language Segment: type: object properties: @@ -1510,6 +1709,54 @@ paths: '400': $ref: '#/components/responses/Error400' '/datasets/{dataset_id}/documents/{document_id}': + get: + summary: Get Document Detail + description: Get document detail + operationId: getDocumentDetail + tags: + - Documents + parameters: + - name: dataset_id + in: path + description: Knowledge Base ID + required: true + schema: + type: string + - name: document_id + in: path + description: Document ID + required: true + schema: + type: string + - name: metadata + in: query + description: metadata filter condition + required: false + schema: + type: string + enum: + - all + - only + - without + default: all + description: | + Filter conditions: + - all: Include all information + - only: Metadata only + - without: Without metadata + responses: + '200': + description: Successfully retrieved document detail + content: + application/json: + schema: + $ref: '#/components/schemas/DocumentDetail' + '400': + $ref: '#/components/responses/Error400' + '403': + $ref: '#/components/responses/Error403' + '404': + $ref: '#/components/responses/Error404' delete: summary: Delete Document description: Delete specified document diff --git a/schema/knowledge_base.zh.yaml b/schema/knowledge_base.zh.yaml index 228ef10..47c9a38 100644 --- a/schema/knowledge_base.zh.yaml +++ b/schema/knowledge_base.zh.yaml @@ -482,6 +482,206 @@ components: type: integer description: 命中次数 + DocumentDetail: + type: object + properties: + id: + type: string + description: 文档ID + position: + type: integer + description: 文档位置 + data_source_type: + type: string + description: 数据源类型 + data_source_info: + type: object + description: 数据源信息 + properties: + upload_file: + type: object + description: 上传文件信息 + dataset_process_rule_id: + type: string + description: 数据集处理规则ID + dataset_process_rule: + type: object + description: 数据集处理规则 + properties: + mode: + type: string + description: 处理模式 + rules: + type: object + description: 规则详情 + properties: + pre_processing_rules: + type: array + description: 预处理规则列表 + items: + type: object + properties: + id: + type: string + description: 规则ID + enabled: + type: boolean + description: 是否启用 + segmentation: + type: object + description: 分段规则 + properties: + separator: + type: string + description: 分隔符 + max_tokens: + type: integer + description: 最大token数 + chunk_overlap: + type: integer + description: 块重叠数 + parent_mode: + type: string + description: 父级模式 + subchunk_segmentation: + type: object + description: 子块分段 + properties: + separator: + type: string + description: 分隔符 + max_tokens: + type: integer + description: 最大token数 + 
chunk_overlap: + type: integer + description: 块重叠数 + document_process_rule: + type: object + description: 文档处理规则 + properties: + id: + type: string + description: 规则ID + dataset_id: + type: string + description: 数据集ID + mode: + type: string + description: 处理模式 + rules: + type: object + description: 规则详情 + properties: + pre_processing_rules: + type: array + description: 预处理规则列表 + items: + type: object + properties: + id: + type: string + description: 规则ID + enabled: + type: boolean + description: 是否启用 + segmentation: + type: object + description: 分段规则 + properties: + separator: + type: string + description: 分隔符 + max_tokens: + type: integer + description: 最大token数 + chunk_overlap: + type: integer + description: 块重叠数 + parent_mode: + type: string + description: 父级模式 + subchunk_segmentation: + type: object + description: 子块分段 + properties: + separator: + type: string + description: 分隔符 + max_tokens: + type: integer + description: 最大token数 + chunk_overlap: + type: integer + description: 块重叠数 + name: + type: string + description: 文档名称 + created_from: + type: string + description: 创建来源 + created_by: + type: string + description: 创建者 + created_at: + type: number + description: 创建时间戳 + tokens: + type: integer + nullable: true + description: token数量 + indexing_status: + type: string + description: 索引状态 + completed_at: + type: number + nullable: true + description: 完成时间戳 + updated_at: + type: number + description: 更新时间戳 + indexing_latency: + type: number + nullable: true + description: 索引延迟 + error: + type: string + nullable: true + description: 错误信息 + enabled: + type: boolean + description: 是否启用 + disabled_at: + type: number + nullable: true + description: 禁用时间戳 + disabled_by: + type: string + nullable: true + description: 禁用者 + archived: + type: boolean + description: 是否归档 + segment_count: + type: integer + description: 分段数量 + average_segment_length: + type: number + description: 平均分段长度 + hit_count: + type: integer + nullable: true + description: 命中次数 + display_status: + type: string + description: 显示状态 + doc_form: + type: string + description: 文档形式 + doc_language: + type: string + description: 文档语言 + Segment: type: object properties: @@ -1491,6 +1691,52 @@ paths: $ref: "#/components/responses/Error400" /datasets/{dataset_id}/documents/{document_id}: + get: + summary: 获取文档详情 + description: 获取文档详情 + operationId: getDocumentDetail + tags: + - Documents + parameters: + - name: dataset_id + in: path + description: 知识库 ID + required: true + schema: + type: string + - name: document_id + in: path + description: 文档 ID + required: true + schema: + type: string + - name: metadata + in: query + description: metadata 过滤条件 + required: false + schema: + type: string + enum: [all, only, without] + default: all + description: | + 过滤条件: + - all: 包含所有信息 + - only: 仅元数据 + - without: 不包含元数据 + responses: + "200": + description: 成功获取文档详情 + content: + application/json: + schema: + $ref: "#/components/schemas/DocumentDetail" + "400": + $ref: "#/components/responses/Error400" + "403": + $ref: "#/components/responses/Error403" + "404": + $ref: "#/components/responses/Error404" + delete: summary: 删除文档 description: 删除指定的文档 diff --git a/schema/overlays/knowledge_base.en.overlay.yaml b/schema/overlays/knowledge_base.en.overlay.yaml index 60956cd..78e5695 100644 --- a/schema/overlays/knowledge_base.en.overlay.yaml +++ b/schema/overlays/knowledge_base.en.overlay.yaml @@ -1877,3 +1877,204 @@ actions: - target: 
$.paths['/datasets/{dataset_id}/documents/status/{action}'].patch.responses['200'].content['application/json'].schema.properties.result.description update: "Operation result" + + # Get document detail API translations + - target: $.paths['/datasets/{dataset_id}/documents/{document_id}'].get.summary + update: "Get Document Detail" + + - target: $.paths['/datasets/{dataset_id}/documents/{document_id}'].get.description + update: "Get document detail" + + - target: $.paths['/datasets/{dataset_id}/documents/{document_id}'].get.parameters[0].description + update: "Knowledge Base ID" + + - target: $.paths['/datasets/{dataset_id}/documents/{document_id}'].get.parameters[1].description + update: "Document ID" + + - target: $.paths['/datasets/{dataset_id}/documents/{document_id}'].get.parameters[2].description + update: "metadata filter condition" + + - target: $.paths['/datasets/{dataset_id}/documents/{document_id}'].get.parameters[2].schema.description + update: | + Filter conditions: + - all: Include all information + - only: Metadata only + - without: Without metadata + + - target: $.paths['/datasets/{dataset_id}/documents/{document_id}'].get.responses['200'].description + update: "Successfully retrieved document detail" + + # Document detail schema translations + - target: $.components.schemas.DocumentDetail.properties.id.description + update: "Document ID" + + - target: $.components.schemas.DocumentDetail.properties.position.description + update: "Document position" + + - target: $.components.schemas.DocumentDetail.properties.data_source_type.description + update: "Data source type" + + - target: $.components.schemas.DocumentDetail.properties.data_source_info.description + update: "Data source information" + + - target: $.components.schemas.DocumentDetail.properties.data_source_info.properties.upload_file.description + update: "Upload file information" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule_id.description + update: "Dataset process rule ID" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.description + update: "Dataset process rule" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.mode.description + update: "Process mode" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.description + update: "Rule details" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.pre_processing_rules.description + update: "Pre-processing rules list" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.pre_processing_rules.items.properties.id.description + update: "Rule ID" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.pre_processing_rules.items.properties.enabled.description + update: "Whether enabled" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.segmentation.description + update: "Segmentation rules" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.segmentation.properties.separator.description + update: "Separator" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.segmentation.properties.max_tokens.description + update: "Maximum tokens" + + - target: 
$.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.segmentation.properties.chunk_overlap.description + update: "Chunk overlap" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.parent_mode.description + update: "Parent mode" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.subchunk_segmentation.description + update: "Sub-chunk segmentation" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.subchunk_segmentation.properties.separator.description + update: "Separator" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.subchunk_segmentation.properties.max_tokens.description + update: "Maximum tokens" + + - target: $.components.schemas.DocumentDetail.properties.dataset_process_rule.properties.rules.properties.subchunk_segmentation.properties.chunk_overlap.description + update: "Chunk overlap" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.description + update: "Document process rule" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.id.description + update: "Rule ID" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.dataset_id.description + update: "Dataset ID" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.mode.description + update: "Process mode" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.description + update: "Rule details" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.pre_processing_rules.description + update: "Pre-processing rules list" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.pre_processing_rules.items.properties.id.description + update: "Rule ID" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.pre_processing_rules.items.properties.enabled.description + update: "Whether enabled" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.segmentation.description + update: "Segmentation rules" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.segmentation.properties.separator.description + update: "Separator" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.segmentation.properties.max_tokens.description + update: "Maximum tokens" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.segmentation.properties.chunk_overlap.description + update: "Chunk overlap" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.parent_mode.description + update: "Parent mode" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.subchunk_segmentation.description + update: "Sub-chunk segmentation" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.subchunk_segmentation.properties.separator.description + update: 
"Separator" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.subchunk_segmentation.properties.max_tokens.description + update: "Maximum tokens" + + - target: $.components.schemas.DocumentDetail.properties.document_process_rule.properties.rules.properties.subchunk_segmentation.properties.chunk_overlap.description + update: "Chunk overlap" + + - target: $.components.schemas.DocumentDetail.properties.name.description + update: "Document name" + + - target: $.components.schemas.DocumentDetail.properties.created_from.description + update: "Creation source" + + - target: $.components.schemas.DocumentDetail.properties.created_by.description + update: "Creator" + + - target: $.components.schemas.DocumentDetail.properties.created_at.description + update: "Creation timestamp" + + - target: $.components.schemas.DocumentDetail.properties.tokens.description + update: "Token count" + + - target: $.components.schemas.DocumentDetail.properties.indexing_status.description + update: "Indexing status" + + - target: $.components.schemas.DocumentDetail.properties.completed_at.description + update: "Completion timestamp" + + - target: $.components.schemas.DocumentDetail.properties.updated_at.description + update: "Update timestamp" + + - target: $.components.schemas.DocumentDetail.properties.indexing_latency.description + update: "Indexing latency" + + - target: $.components.schemas.DocumentDetail.properties.error.description + update: "Error message" + + - target: $.components.schemas.DocumentDetail.properties.enabled.description + update: "Whether enabled" + + - target: $.components.schemas.DocumentDetail.properties.disabled_at.description + update: "Disabled timestamp" + + - target: $.components.schemas.DocumentDetail.properties.disabled_by.description + update: "Disabled by" + + - target: $.components.schemas.DocumentDetail.properties.archived.description + update: "Whether archived" + + - target: $.components.schemas.DocumentDetail.properties.segment_count.description + update: "Segment count" + + - target: $.components.schemas.DocumentDetail.properties.average_segment_length.description + update: "Average segment length" + + - target: $.components.schemas.DocumentDetail.properties.hit_count.description + update: "Hit count" + + - target: $.components.schemas.DocumentDetail.properties.display_status.description + update: "Display status" + + - target: $.components.schemas.DocumentDetail.properties.doc_form.description + update: "Document form" + + - target: $.components.schemas.DocumentDetail.properties.doc_language.description + update: "Document language" From eeaa0b98d8eee0292cc21ae4c089ccc1ccd087a9 Mon Sep 17 00:00:00 2001 From: straydragon Date: Thu, 3 Jul 2025 23:42:57 +0800 Subject: [PATCH 4/6] bump: version --- README.md | 12 ++-- README.zh.md | 12 ++-- pyproject.toml | 2 +- schema/app_advanced_chat.en.yaml | 4 +- schema/app_advanced_chat.zh.yaml | 4 +- schema/app_chat.en.yaml | 4 +- schema/app_chat.zh.yaml | 4 +- schema/app_generation.en.yaml | 2 +- schema/app_generation.zh.yaml | 4 +- schema/app_workflow.en.yaml | 2 +- schema/app_workflow.zh.yaml | 4 +- schema/knowledge_base.en.yaml | 2 +- schema/knowledge_base.zh.yaml | 2 +- uv.lock | 104 +++++++++++++++---------------- 14 files changed, 81 insertions(+), 81 deletions(-) diff --git a/README.md b/README.md index b834ba0..42a8760 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@ # Dify x OpenAPI -[![Dify Version 
Support](https://img.shields.io/badge/Support_Dify_Version-1.5.0-blue)](https://github.com/langgenius/dify) +[![Dify Version Support](https://img.shields.io/badge/Support_Dify_Version-1.5.1-blue)](https://github.com/langgenius/dify) [![Code style: ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff) [![License](https://img.shields.io/badge/license-MIT-green.svg)](LICENSE) [![Package Manager: uv](https://img.shields.io/badge/package%20manager-uv-black)](https://github.com/astral-sh/uv) @@ -39,7 +39,7 @@ Provides OpenAPI Schema for [Dify](https://github.com/langgenius/dify) API, whic > [!tip] > This indicates that the API has passed at least one test case request. If you find any API errors, feel free to submit an issue or PR! -- Knowledge Base: [OpenAPI Schema(中文)](./schema/knowledge_base.zh.yaml) | [OpenAPI Schema(English)](./schema/knowledge_base.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.0/web/app/(commonLayout)/datasets/template) +- Knowledge Base: [OpenAPI Schema(中文)](./schema/knowledge_base.zh.yaml) | [OpenAPI Schema(English)](./schema/knowledge_base.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.1/web/app/(commonLayout)/datasets/template) - [x] POST /datasets - Create empty knowledge base - [x] POST /datasets/{dataset_id} - Update knowledge base - [x] GET /datasets/{dataset_id}/documents - Get document list @@ -71,7 +71,7 @@ Provides OpenAPI Schema for [Dify](https://github.com/langgenius/dify) API, whic - [x] POST /datasets/{dataset_id}/tags - Query dataset bound tags - [x] POST /datasets/{dataset_id}/retrieval - Retrieve with metadata filtering conditions -- Chat Application: [OpenAPI Schema(中文)](./schema/app_chat.zh.yaml) | [OpenAPI Schema(English)](./schema/app_chat.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.0/web/app/components/develop/template) +- Chat Application: [OpenAPI Schema(中文)](./schema/app_chat.zh.yaml) | [OpenAPI Schema(English)](./schema/app_chat.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.1/web/app/components/develop/template) - [x] POST /chat-messages - Send conversation message - [x] POST /files/upload - Upload file - [x] POST /messages/{message_id}/feedbacks - Message feedback @@ -84,7 +84,7 @@ Provides OpenAPI Schema for [Dify](https://github.com/langgenius/dify) API, whic - [x] GET /info - Get application basic information - [x] GET /parameters - Get application parameters -- Advanced Chat Application: [OpenAPI Schema(中文)](./schema/app_advanced_chat.zh.yaml) | [OpenAPI Schema(English)](./schema/app_advanced_chat.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.0/web/app/components/develop/template) +- Advanced Chat Application: [OpenAPI Schema(中文)](./schema/app_advanced_chat.zh.yaml) | [OpenAPI Schema(English)](./schema/app_advanced_chat.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.1/web/app/components/develop/template) - [x] POST /audio-to-text - Speech to text - [x] POST /text-to-audio - Text to speech - [x] GET /apps/annotations - Get annotation list @@ -100,10 +100,10 @@ Provides OpenAPI Schema for [Dify](https://github.com/langgenius/dify) API, whic - [x] GET /parameters - Get application parameters -- Text Generation Application: [OpenAPI Schema(中文)](./schema/app_generation.zh.yaml) | [OpenAPI Schema(English)](./schema/app_generation.en.yaml) | [Official 
Documentation Source](https://github.com/langgenius/dify/tree/1.5.0/web/app/components/develop/template) +- Text Generation Application: [OpenAPI Schema(中文)](./schema/app_generation.zh.yaml) | [OpenAPI Schema(English)](./schema/app_generation.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.1/web/app/components/develop/template) - [x] POST /completion-messages - Send message -- Workflow Application: [OpenAPI Schema(中文)](./schema/app_workflow.zh.yaml) | [OpenAPI Schema(English)](./schema/app_workflow.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.0/web/app/components/develop/template) +- Workflow Application: [OpenAPI Schema(中文)](./schema/app_workflow.zh.yaml) | [OpenAPI Schema(English)](./schema/app_workflow.en.yaml) | [Official Documentation Source](https://github.com/langgenius/dify/tree/1.5.1/web/app/components/develop/template) - [x] POST /workflows/run - Execute workflow - [x] GET /workflows/run/{workflow_run_id} - Get workflow execution status - [x] POST /workflows/tasks/{task_id}/stop - Stop response diff --git a/README.zh.md b/README.zh.md index 7ee9235..8d7ba66 100644 --- a/README.zh.md +++ b/README.zh.md @@ -1,6 +1,6 @@ # Dify x OpenAPI -[![Dify Version Support](https://img.shields.io/badge/Support_Dify_Version-1.5.0-blue)](https://github.com/langgenius/dify) +[![Dify Version Support](https://img.shields.io/badge/Support_Dify_Version-1.5.1-blue)](https://github.com/langgenius/dify) [![Code style: ruff](https://img.shields.io/badge/code%20style-ruff-000000.svg)](https://github.com/astral-sh/ruff) [![License](https://img.shields.io/badge/license-MIT-green.svg)](LICENSE) [![Package Manager: uv](https://img.shields.io/badge/package%20manager-uv-black)](https://github.com/astral-sh/uv) @@ -41,7 +41,7 @@ > [!tip] > 这里指至少可以通过一次测试用例请求, 如果你发现有哪些API错误, 欢迎提issue或者pr! 
-- 知识库: [OpenAPI Schema(中文)](./schema/knowledge_base.zh.yaml) | [OpenAPI Schema(English)](./schema/knowledge_base.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.0/web/app/(commonLayout)/datasets/template) +- 知识库: [OpenAPI Schema(中文)](./schema/knowledge_base.zh.yaml) | [OpenAPI Schema(English)](./schema/knowledge_base.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.1/web/app/(commonLayout)/datasets/template) - [x] POST /datasets - 创建空知识库 - [x] POST /datasets/{dataset_id} - 更新知识库 - [x] GET /datasets/{dataset_id}/documents - 获取文档列表 @@ -73,7 +73,7 @@ - [x] POST /datasets/{dataset_id}/tags - 查询知识库已绑定的标签 - [x] POST /datasets/{dataset_id}/retrieval - 带元数据过滤条件的检索 -- 聊天应用: [OpenAPI Schema(中文)](./schema/app_chat.zh.yaml) | [OpenAPI Schema(English)](./schema/app_chat.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.0/web/app/components/develop/template) +- 聊天应用: [OpenAPI Schema(中文)](./schema/app_chat.zh.yaml) | [OpenAPI Schema(English)](./schema/app_chat.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.1/web/app/components/develop/template) - [x] POST /chat-messages - 发送对话消息 - [x] POST /files/upload - 上传文件 - [x] POST /messages/{message_id}/feedbacks - 消息反馈 @@ -86,7 +86,7 @@ - [x] GET /info - 获取应用基本信息 - [x] GET /parameters - 获取应用参数 -- 高级聊天应用: [OpenAPI Schema(中文)](./schema/app_advanced_chat.zh.yaml) | [OpenAPI Schema(English)](./schema/app_advanced_chat.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.0/web/app/components/develop/template) +- 高级聊天应用: [OpenAPI Schema(中文)](./schema/app_advanced_chat.zh.yaml) | [OpenAPI Schema(English)](./schema/app_advanced_chat.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.1/web/app/components/develop/template) - [x] POST /audio-to-text - 语音转文字 - [x] POST /text-to-audio - 文字转语音 - [x] GET /apps/annotations - 获取标注列表 @@ -101,10 +101,10 @@ - [x] GET /info - 获取应用基本信息 - [x] GET /parameters - 获取应用参数(包含文字转语音设置) -- 文本生成应用: [OpenAPI Schema(中文)](./schema/app_generation.zh.yaml) | [OpenAPI Schema(English)](./schema/app_generation.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.0/web/app/components/develop/template) +- 文本生成应用: [OpenAPI Schema(中文)](./schema/app_generation.zh.yaml) | [OpenAPI Schema(English)](./schema/app_generation.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.1/web/app/components/develop/template) - [x] POST /completion-messages - 发送消息 -- 工作流应用: [OpenAPI Schema(中文)](./schema/app_workflow.zh.yaml) | [OpenAPI Schema(English)](./schema/app_workflow.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.0/web/app/components/develop/template) +- 工作流应用: [OpenAPI Schema(中文)](./schema/app_workflow.zh.yaml) | [OpenAPI Schema(English)](./schema/app_workflow.en.yaml) | [官方文档源码](https://github.com/langgenius/dify/tree/1.5.1/web/app/components/develop/template) - [x] POST /workflows/run - 执行工作流 - [x] GET /workflows/run/{workflow_run_id} - 获取工作流执行状态 - [x] POST /workflows/tasks/{task_id}/stop - 停止响应 diff --git a/pyproject.toml b/pyproject.toml index 679a855..0701bb8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "dify-openapi" -version = "1.5.0" +version = "1.5.1" description = "a client for testing and validating dify openapi schema" readme = "README.md" authors = [{ name = "straydragon", email = "straydragonl@foxmail.com" }] diff --git a/schema/app_advanced_chat.en.yaml b/schema/app_advanced_chat.en.yaml index 82f7987..e8e9457 100644 --- a/schema/app_advanced_chat.en.yaml +++ 
b/schema/app_advanced_chat.en.yaml @@ -6,8 +6,8 @@ info: Chat applications support session persistence, allowing previous chat history to be used as context for responses, suitable for chat/customer service AI, etc. All API requests require an application-level API-Key in the Authorization HTTP Header. It is strongly recommended that developers store the API-Key on the backend rather than sharing or storing it on the client side to prevent API-Key leakage and financial loss. - official raw document: https://github.com/langgenius/dify/blob/1.5.0/web/app/components/develop/template/template_advanced_chat.en.mdx - version: 1.5.0 + official raw document: https://github.com/langgenius/dify/blob/1.5.1/web/app/components/develop/template/template_advanced_chat.en.mdx + version: 1.5.1 servers: - url: 'https://api.dify.ai/v1' description: Dify API Server diff --git a/schema/app_advanced_chat.zh.yaml b/schema/app_advanced_chat.zh.yaml index 992a6bc..2470552 100644 --- a/schema/app_advanced_chat.zh.yaml +++ b/schema/app_advanced_chat.zh.yaml @@ -6,8 +6,8 @@ info: 对话应用支持会话持久化,可将之前的聊天记录作为上下文进行回答,可适用于聊天/客服 AI 等。 所有 API 请求都需要在 Authorization HTTP Header 中包含应用级 API-Key。 强烈建议开发者把 API-Key 放在后端存储,而非分享或者放在客户端存储,以免 API-Key 泄露,导致财产损失。 - 官方原始文档见: https://github.com/langgenius/dify/blob/1.5.0/web/app/components/develop/template/template_advanced_chat.zh.mdx - version: 1.5.0 + 官方原始文档见: https://github.com/langgenius/dify/blob/1.5.1/web/app/components/develop/template/template_advanced_chat.zh.mdx + version: 1.5.1 servers: - url: https://api.dify.ai/v1 diff --git a/schema/app_chat.en.yaml b/schema/app_chat.en.yaml index 6aac2ff..d07124e 100644 --- a/schema/app_chat.en.yaml +++ b/schema/app_chat.en.yaml @@ -5,8 +5,8 @@ info: Dify API provides a series of interfaces for conversation functionality. It supports conversational applications. All API requests require an application-level API-Key in the Authorization HTTP Header. - official raw document: https://github.com/langgenius/dify/blob/1.5.0/web/app/components/develop/template/template_chat.en.mdx - version: 1.5.0 + official raw document: https://github.com/langgenius/dify/blob/1.5.1/web/app/components/develop/template/template_chat.en.mdx + version: 1.5.1 servers: - url: 'https://api.dify.ai/v1' description: Dify API Server diff --git a/schema/app_chat.zh.yaml b/schema/app_chat.zh.yaml index 0d4cf5b..d363b65 100644 --- a/schema/app_chat.zh.yaml +++ b/schema/app_chat.zh.yaml @@ -5,8 +5,8 @@ info: Dify API 提供了一系列接口用于对话功能。 支持对话型应用。 所有 API 请求都需要在 Authorization HTTP Header 中包含应用级 API-Key。 - 官方原始文档见: https://github.com/langgenius/dify/blob/1.5.0/web/app/components/develop/template/template_chat.zh.mdx - version: 1.5.0 + 官方原始文档见: https://github.com/langgenius/dify/blob/1.5.1/web/app/components/develop/template/template_chat.zh.mdx + version: 1.5.1 servers: - url: https://api.dify.ai/v1 diff --git a/schema/app_generation.en.yaml b/schema/app_generation.en.yaml index 884a23b..a866321 100644 --- a/schema/app_generation.en.yaml +++ b/schema/app_generation.en.yaml @@ -6,7 +6,7 @@ info: It supports text generation applications. All API requests require an application-level API-Key in the Authorization HTTP Header.
official raw document: https://github.com/langgenius/dify/blob/1.5.0/web/app/components/develop/template/template.en.mdx - version: 1.5.0 + version: 1.5.1 servers: - url: 'https://api.dify.ai/v1' description: Dify API Server diff --git a/schema/app_generation.zh.yaml b/schema/app_generation.zh.yaml index 81ffc88..c40c312 100644 --- a/schema/app_generation.zh.yaml +++ b/schema/app_generation.zh.yaml @@ -5,8 +5,8 @@ info: Dify API 提供了一系列接口用于文本生成功能。 支持文本生成型应用。 所有 API 请求都需要在 Authorization HTTP Header 中包含应用级 API-Key。 - 官方原始文档见: https://github.com/langgenius/dify/blob/1.5.0/web/app/components/develop/template/template.zh.mdx - version: 1.5.0 + 官方原始文档见: https://github.com/langgenius/dify/blob/1.5.1/web/app/components/develop/template/template.zh.mdx + version: 1.5.1 servers: - url: https://api.dify.ai/v1 diff --git a/schema/app_workflow.en.yaml b/schema/app_workflow.en.yaml index bdafc19..2010e21 100644 --- a/schema/app_workflow.en.yaml +++ b/schema/app_workflow.en.yaml @@ -7,7 +7,7 @@ info: All API requests require an application-level API-Key in the Authorization HTTP Header. It is strongly recommended that developers store the API-Key on the backend rather than sharing or storing it on the client to prevent API-Key leakage and financial loss. official raw document: https://github.com/langgenius/dify/blob/1.5.0/web/app/components/develop/template/template_workflow.en.mdx - version: 1.5.0 + version: 1.5.1 servers: - url: 'https://api.dify.ai/v1' description: Dify API Server diff --git a/schema/app_workflow.zh.yaml b/schema/app_workflow.zh.yaml index 6b9fcdd..597421b 100644 --- a/schema/app_workflow.zh.yaml +++ b/schema/app_workflow.zh.yaml @@ -6,8 +6,8 @@ info: Workflow 应用无会话支持,适合用于翻译/文章写作/总结 AI 等等。 所有 API 请求都需要在 Authorization HTTP Header 中包含应用级 API-Key。 强烈建议开发者把 API-Key 放在后端存储,而非分享或者放在客户端存储,以免 API-Key 泄露,导致财产损失。 - 官方原始文档见: https://github.com/langgenius/dify/blob/1.5.0/web/app/components/develop/template/template_workflow.zh.mdx - version: 1.5.0 + 官方原始文档见: https://github.com/langgenius/dify/blob/1.5.1/web/app/components/develop/template/template_workflow.zh.mdx + version: 1.5.1 servers: - url: https://api.dify.ai/v1 diff --git a/schema/knowledge_base.en.yaml b/schema/knowledge_base.en.yaml index cbb2041..1518378 100644 --- a/schema/knowledge_base.en.yaml +++ b/schema/knowledge_base.en.yaml @@ -4,7 +4,7 @@ info: description: | Dify Knowledge Base API provides a series of interfaces for managing knowledge bases, documents, and retrieval functions. All API requests require a knowledge base level API-Key in the Authorization HTTP Header. 
- version: 1.5.0 + version: 1.5.1 servers: - url: 'https://api.dify.ai/v1' description: Dify API Server diff --git a/schema/knowledge_base.zh.yaml b/schema/knowledge_base.zh.yaml index 47c9a38..0860bc4 100644 --- a/schema/knowledge_base.zh.yaml +++ b/schema/knowledge_base.zh.yaml @@ -4,7 +4,7 @@ info: description: | Dify 知识库 API 提供了一系列接口用于管理知识库、文档和检索功能。 所有 API 请求都需要在 Authorization HTTP Header 中包含知识库级 API-Key。 - version: 1.5.0 + version: 1.5.1 servers: - url: https://api.dify.ai/v1 diff --git a/uv.lock b/uv.lock index f2a2188..11a9391 100644 --- a/uv.lock +++ b/uv.lock @@ -12,7 +12,7 @@ resolution-markers = [ [[package]] name = "annotated-types" version = "0.7.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53" }, @@ -21,7 +21,7 @@ wheels = [ [[package]] name = "anyio" version = "4.9.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "idna" }, @@ -36,7 +36,7 @@ wheels = [ [[package]] name = "asttokens" version = "3.0.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/4a/e7/82da0a03e7ba5141f05cce0d302e6eed121ae055e0456ca228bf693984bc/asttokens-3.0.0.tar.gz", hash = "sha256:0dcd8baa8d62b0c1d118b399b2ddba3c4aff271d0d7a9e0d4c1681c79035bbc7" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/25/8a/c46dcc25341b5bce5472c718902eb3d38600a903b14fa6aeecef3f21a46f/asttokens-3.0.0-py3-none-any.whl", hash = "sha256:e3078351a059199dd5138cb1c706e6430c05eff2ff136af5eb4790f9d28932e2" }, @@ -45,7 +45,7 @@ wheels = [ [[package]] name = "certifi" version = "2025.4.26" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3" }, @@ -54,7 +54,7 @@ wheels = [ [[package]] name = "click" version = "8.1.8" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] @@ -66,7 +66,7 @@ wheels = [ [[package]] name = "colorama" version = "0.4.6" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = 
"http://mirrors.aliyun.com/pypi/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" }, @@ -75,7 +75,7 @@ wheels = [ [[package]] name = "coverage" version = "7.8.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/19/4f/2251e65033ed2ce1e68f00f91a0294e0f80c80ae8c3ebbe2f12828c4cd53/coverage-7.8.0.tar.gz", hash = "sha256:7a3d62b3b03b4b6fd41a085f3574874cf946cb4604d2b4d3e8dca8cd570ca501" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/78/01/1c5e6ee4ebaaa5e079db933a9a45f61172048c7efa06648445821a201084/coverage-7.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2931f66991175369859b5fd58529cd4b73582461877ecfd859b6549869287ffe" }, @@ -150,7 +150,7 @@ toml = [ [[package]] name = "decorator" version = "5.2.1" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a" }, @@ -158,7 +158,7 @@ wheels = [ [[package]] name = "dify-openapi" -version = "1.5.0" +version = "1.5.1" source = { editable = "." 
} dependencies = [ { name = "httpx" }, @@ -168,9 +168,9 @@ dependencies = [ [package.dev-dependencies] dev = [ - { name = "ipython", version = "8.18.1", source = { registry = "http://mirrors.aliyun.com/pypi/simple/" }, marker = "python_full_version < '3.10'" }, - { name = "ipython", version = "8.36.0", source = { registry = "http://mirrors.aliyun.com/pypi/simple/" }, marker = "python_full_version == '3.10.*'" }, - { name = "ipython", version = "9.2.0", source = { registry = "http://mirrors.aliyun.com/pypi/simple/" }, marker = "python_full_version >= '3.11'" }, + { name = "ipython", version = "8.18.1", source = { registry = "http://mirrors.aliyun.com/pypi/simple" }, marker = "python_full_version < '3.10'" }, + { name = "ipython", version = "8.36.0", source = { registry = "http://mirrors.aliyun.com/pypi/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "ipython", version = "9.2.0", source = { registry = "http://mirrors.aliyun.com/pypi/simple" }, marker = "python_full_version >= '3.11'" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, @@ -198,7 +198,7 @@ dev = [ [[package]] name = "exceptiongroup" version = "1.2.2" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/09/35/2495c4ac46b980e4ca1f6ad6db102322ef3ad2410b79fdde159a4b0f3b92/exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/02/cc/b7e31358aac6ed1ef2bb790a9746ac2c69bcb3c8588b41616914eb106eaf/exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b" }, @@ -207,7 +207,7 @@ wheels = [ [[package]] name = "executing" version = "2.2.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/91/50/a9d80c47ff289c611ff12e63f7c5d13942c65d68125160cefd768c73e6e4/executing-2.2.0.tar.gz", hash = "sha256:5d108c028108fe2551d1a7b2e8b713341e2cb4fc0aa7dcf966fa4327a5226755" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/7b/8f/c4d9bafc34ad7ad5d8dc16dd1347ee0e507a52c3adb6bfa8887e1c6a26ba/executing-2.2.0-py2.py3-none-any.whl", hash = "sha256:11387150cad388d62750327a53d3339fad4888b39a6fe233c3afbb54ecffd3aa" }, @@ -216,7 +216,7 @@ wheels = [ [[package]] name = "h11" version = "0.16.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86" }, @@ -225,7 +225,7 @@ wheels = [ [[package]] name = "httpcore" version = "1.0.9" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "certifi" }, { name = "h11" }, @@ -238,7 +238,7 @@ wheels = [ [[package]] name = "httpx" version = "0.28.1" -source = { registry = 
"http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "anyio" }, { name = "certifi" }, @@ -253,7 +253,7 @@ wheels = [ [[package]] name = "httpx-sse" version = "0.4.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f" }, @@ -262,7 +262,7 @@ wheels = [ [[package]] name = "idna" version = "3.10" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3" }, @@ -271,7 +271,7 @@ wheels = [ [[package]] name = "iniconfig" version = "2.1.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760" }, @@ -280,7 +280,7 @@ wheels = [ [[package]] name = "ipython" version = "8.18.1" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } resolution-markers = [ "python_full_version < '3.10'", ] @@ -305,7 +305,7 @@ wheels = [ [[package]] name = "ipython" version = "8.36.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } resolution-markers = [ "python_full_version == '3.10.*'", ] @@ -330,7 +330,7 @@ wheels = [ [[package]] name = "ipython" version = "9.2.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } resolution-markers = [ "python_full_version >= '3.13'", "python_full_version >= '3.12.4' and python_full_version < '3.13'", @@ -357,7 +357,7 @@ wheels = [ [[package]] name = "ipython-pygments-lexers" version = "1.1.1" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "pygments", marker = "python_full_version >= '3.11'" }, ] @@ -369,7 +369,7 @@ wheels = [ [[package]] name = "jedi" version = "0.19.2" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { 
registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "parso" }, ] @@ -381,7 +381,7 @@ wheels = [ [[package]] name = "markdown-it-py" version = "3.0.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "mdurl" }, ] @@ -393,7 +393,7 @@ wheels = [ [[package]] name = "matplotlib-inline" version = "0.1.7" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "traitlets" }, ] @@ -405,7 +405,7 @@ wheels = [ [[package]] name = "mdurl" version = "0.1.2" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8" }, @@ -414,7 +414,7 @@ wheels = [ [[package]] name = "packaging" version = "24.2" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/d0/63/68dbb6eb2de9cb10ee4c9c14a0148804425e13c4fb20d61cce69f53106da/packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/88/ef/eb23f262cca3c0c4eb7ab1933c3b1f03d021f2c48f54763065b6f0e321be/packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759" }, @@ -423,7 +423,7 @@ wheels = [ [[package]] name = "parso" version = "0.8.4" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/66/94/68e2e17afaa9169cf6412ab0f28623903be73d1b32e208d9e8e541bb086d/parso-0.8.4.tar.gz", hash = "sha256:eb3a7b58240fb99099a345571deecc0f9540ea5f4dd2fe14c2a99d6b281ab92d" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18" }, @@ -432,7 +432,7 @@ wheels = [ [[package]] name = "pexpect" version = "4.9.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "ptyprocess" }, ] @@ -444,7 +444,7 @@ wheels = [ [[package]] name = "pluggy" version = "1.5.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/96/2d/02d4312c973c6050a18b314a5ad0b3210edb65a906f868e31c111dede4a6/pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/88/5f/e351af9a41f866ac3f1fac4ca0613908d9a41741cfcf2228f4ad853b697d/pluggy-1.5.0-py3-none-any.whl", hash = 
"sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669" }, @@ -453,7 +453,7 @@ wheels = [ [[package]] name = "prompt-toolkit" version = "3.0.51" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "wcwidth" }, ] @@ -465,7 +465,7 @@ wheels = [ [[package]] name = "ptyprocess" version = "0.7.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/20/e5/16ff212c1e452235a90aeb09066144d0c5a6a8c0834397e03f5224495c4e/ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35" }, @@ -474,7 +474,7 @@ wheels = [ [[package]] name = "pure-eval" version = "0.2.3" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/cd/05/0a34433a064256a578f1783a10da6df098ceaa4a57bbeaa96a6c0352786b/pure_eval-0.2.3.tar.gz", hash = "sha256:5f4e983f40564c576c7c8635ae88db5956bb2229d7e9237d03b3c0b0190eaf42" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0" }, @@ -483,7 +483,7 @@ wheels = [ [[package]] name = "pydantic" version = "2.11.3" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "annotated-types" }, { name = "pydantic-core" }, @@ -498,7 +498,7 @@ wheels = [ [[package]] name = "pydantic-core" version = "2.33.1" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "typing-extensions" }, ] @@ -607,7 +607,7 @@ wheels = [ [[package]] name = "pygments" version = "2.19.1" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c" }, @@ -616,7 +616,7 @@ wheels = [ [[package]] name = "pytest" version = "8.3.5" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, @@ -633,7 +633,7 @@ wheels = [ [[package]] name = "pytest-asyncio" version = "0.26.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { 
name = "pytest" }, { name = "typing-extensions", marker = "python_full_version < '3.10'" }, @@ -646,7 +646,7 @@ wheels = [ [[package]] name = "pytest-cov" version = "6.1.1" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pytest" }, @@ -659,7 +659,7 @@ wheels = [ [[package]] name = "pyyaml" version = "6.0.2" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086" }, @@ -712,7 +712,7 @@ wheels = [ [[package]] name = "rich" version = "14.0.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "markdown-it-py" }, { name = "pygments" }, @@ -726,7 +726,7 @@ wheels = [ [[package]] name = "shellingham" version = "1.5.4" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686" }, @@ -735,7 +735,7 @@ wheels = [ [[package]] name = "sniffio" version = "1.3.1" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2" }, @@ -744,7 +744,7 @@ wheels = [ [[package]] name = "stack-data" version = "0.6.3" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "asttokens" }, { name = "executing" }, @@ -758,7 +758,7 @@ wheels = [ [[package]] name = "tomli" version = "2.2.1" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff" } wheels = [ { url = 
"http://mirrors.aliyun.com/pypi/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249" }, @@ -797,7 +797,7 @@ wheels = [ [[package]] name = "traitlets" version = "5.14.3" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/eb/79/72064e6a701c2183016abbbfedaba506d81e30e232a68c9f0d6f6fcd1574/traitlets-5.14.3.tar.gz", hash = "sha256:9ed0579d3502c94b4b3732ac120375cda96f923114522847de4b3bb98b96b6b7" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/00/c0/8f5d070730d7836adc9c9b6408dec68c6ced86b304a9b26a14df072a6e8c/traitlets-5.14.3-py3-none-any.whl", hash = "sha256:b74e89e397b1ed28cc831db7aea759ba6640cb3de13090ca145426688ff1ac4f" }, @@ -806,7 +806,7 @@ wheels = [ [[package]] name = "typer" version = "0.15.3" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "click" }, { name = "rich" }, @@ -821,7 +821,7 @@ wheels = [ [[package]] name = "typing-extensions" version = "4.13.2" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/f6/37/23083fcd6e35492953e8d2aaaa68b860eb422b34627b13f2ce3eb6106061/typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/8b/54/b1ae86c0973cc6f0210b53d508ca3641fb6d0c56823f288d108bc7ab3cc8/typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c" }, @@ -830,7 +830,7 @@ wheels = [ [[package]] name = "typing-inspection" version = "0.4.0" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } dependencies = [ { name = "typing-extensions" }, ] @@ -842,7 +842,7 @@ wheels = [ [[package]] name = "wcwidth" version = "0.2.13" -source = { registry = "http://mirrors.aliyun.com/pypi/simple/" } +source = { registry = "http://mirrors.aliyun.com/pypi/simple" } sdist = { url = "http://mirrors.aliyun.com/pypi/packages/6c/63/53559446a878410fc5a5974feb13d31d78d752eb18aeba59c7fef1af7598/wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5" } wheels = [ { url = "http://mirrors.aliyun.com/pypi/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859" }, From 95a8ccafdb9294d0e2f302bdfcf98f6e94959b75 Mon Sep 17 00:00:00 2001 From: straydragon Date: Thu, 3 Jul 2025 23:46:34 +0800 Subject: [PATCH 5/6] fix: schema --- schema/knowledge_base.en.yaml | 17 +++++++++++++++++ schema/knowledge_base.zh.yaml | 18 ++++++++++++++++++ schema/overlays/knowledge_base.en.overlay.yaml | 4 ++++ 3 files changed, 39 insertions(+) diff --git a/schema/knowledge_base.en.yaml b/schema/knowledge_base.en.yaml index 1518378..4dcda9c 100644 --- a/schema/knowledge_base.en.yaml +++ b/schema/knowledge_base.en.yaml @@ -1126,6 +1126,23 @@ components: code: archived_document_immutable status: 403 message: The archived document is not editable. 
+    Error404:
+      description: Resource not found
+      content:
+        application/json:
+          schema:
+            $ref: '#/components/schemas/Error'
+          examples:
+            document_not_found:
+              value:
+                code: document_not_found
+                status: 404
+                message: Document not found.
+            dataset_not_found:
+              value:
+                code: dataset_not_found
+                status: 404
+                message: Dataset not found.
     Error409:
       description: Resource conflict
       content:
diff --git a/schema/knowledge_base.zh.yaml b/schema/knowledge_base.zh.yaml
index 0860bc4..50be975 100644
--- a/schema/knowledge_base.zh.yaml
+++ b/schema/knowledge_base.zh.yaml
@@ -1100,6 +1100,24 @@ components:
                 status: 403
                 message: The archived document is not editable.
+    Error404:
+      description: 资源不存在
+      content:
+        application/json:
+          schema:
+            $ref: "#/components/schemas/Error"
+          examples:
+            document_not_found:
+              value:
+                code: document_not_found
+                status: 404
+                message: Document not found.
+            dataset_not_found:
+              value:
+                code: dataset_not_found
+                status: 404
+                message: Dataset not found.
+
     Error409:
       description: 资源冲突
       content:
diff --git a/schema/overlays/knowledge_base.en.overlay.yaml b/schema/overlays/knowledge_base.en.overlay.yaml
index 78e5695..85ff949 100644
--- a/schema/overlays/knowledge_base.en.overlay.yaml
+++ b/schema/overlays/knowledge_base.en.overlay.yaml
@@ -2078,3 +2078,7 @@ actions:
   - target: $.components.schemas.DocumentDetail.properties.doc_language.description
     update: "Document language"
+
+  # Error404 response translations
+  - target: $.components.responses.Error404.description
+    update: "Resource not found"
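The Error404 responses above give 404s a stable, documented shape (code / status / message). A minimal sketch of how a raw HTTP caller might branch on those codes; API_BASE, API_KEY, and both IDs are placeholders, and requests is used here only for illustration:

```python
import requests

API_BASE = "https://api.dify.example/v1"  # assumption: your Dify API base URL
API_KEY = "YOUR_API_KEY"
dataset_id = "dataset_id"
document_id = "document_id"

resp = requests.get(
    f"{API_BASE}/datasets/{dataset_id}/documents/{document_id}",
    headers={"Authorization": f"Bearer {API_KEY}"},
)
if resp.status_code == 404:
    body = resp.json()  # matches the Error schema: code / status / message
    if body.get("code") == "dataset_not_found":
        print("Knowledge base does not exist:", body.get("message"))
    elif body.get("code") == "document_not_found":
        print("Document does not exist:", body.get("message"))
else:
    resp.raise_for_status()
    print(resp.json().get("name"))
```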
From c1f3d397a269ec3675334431f4faaad0cee7c20b Mon Sep 17 00:00:00 2001
From: straydragon
Date: Thu, 3 Jul 2025 23:46:51 +0800
Subject: [PATCH 6/6] gen: sdk

---
 src/dify_sdk/knowledge_base/__init__.py       |  36 +++-
 .../knowledge_base/documents/__init__.py      |   2 +
 .../knowledge_base/documents/client.py        |  87 +++++++++
 .../knowledge_base/documents/raw_client.py    | 169 ++++++++++++++++++
 .../documents/types/__init__.py               |   2 +
 .../get_document_detail_request_metadata.py   |   5 +
 .../knowledge_base/errors/__init__.py         |  10 +-
 .../knowledge_base/errors/not_found_error.py  |  10 ++
 src/dify_sdk/knowledge_base/types/__init__.py |  32 ++++
 .../knowledge_base/types/document_detail.py   | 155 ++++++++++++++++
 .../types/document_detail_data_source_info.py |  26 +++
 .../document_detail_dataset_process_rule.py   |  32 ++++
 ...ument_detail_dataset_process_rule_rules.py |  52 ++++++
 ...ss_rule_rules_pre_processing_rules_item.py |  27 +++
 ...dataset_process_rule_rules_segmentation.py |  36 ++++
 ...rocess_rule_rules_subchunk_segmentation.py |  36 ++++
 .../document_detail_document_process_rule.py  |  42 +++++
 ...ment_detail_document_process_rule_rules.py |  52 ++++++
 ...ss_rule_rules_pre_processing_rules_item.py |  27 +++
 ...ocument_process_rule_rules_segmentation.py |  36 ++++
 ...rocess_rule_rules_subchunk_segmentation.py |  36 ++++
 21 files changed, 908 insertions(+), 2 deletions(-)
 create mode 100644 src/dify_sdk/knowledge_base/documents/types/get_document_detail_request_metadata.py
 create mode 100644 src/dify_sdk/knowledge_base/errors/not_found_error.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_data_source_info.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_pre_processing_rules_item.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_segmentation.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_subchunk_segmentation.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_document_process_rule.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_pre_processing_rules_item.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_segmentation.py
 create mode 100644 src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_subchunk_segmentation.py

diff --git a/src/dify_sdk/knowledge_base/__init__.py b/src/dify_sdk/knowledge_base/__init__.py
index 506db24..13cd81f 100644
--- a/src/dify_sdk/knowledge_base/__init__.py
+++ b/src/dify_sdk/knowledge_base/__init__.py
@@ -17,6 +17,18 @@
     DatasetRetrievalModelDict,
     DatasetRetrievalModelDictRerankingModel,
     Document,
+    DocumentDetail,
+    DocumentDetailDataSourceInfo,
+    DocumentDetailDatasetProcessRule,
+    DocumentDetailDatasetProcessRuleRules,
+    DocumentDetailDatasetProcessRuleRulesPreProcessingRulesItem,
+    DocumentDetailDatasetProcessRuleRulesSegmentation,
+    DocumentDetailDatasetProcessRuleRulesSubchunkSegmentation,
+    DocumentDetailDocumentProcessRule,
+    DocumentDetailDocumentProcessRuleRules,
+    DocumentDetailDocumentProcessRuleRulesPreProcessingRulesItem,
+    DocumentDetailDocumentProcessRuleRulesSegmentation,
+    DocumentDetailDocumentProcessRuleRulesSubchunkSegmentation,
     DocumentDisplayStatus,
     DocumentSegment,
     DocumentSegmentStatus,
@@ -49,7 +61,14 @@
     UploadDocumentDataSourceType,
     UploadFile,
 )
-from .errors import BadRequestError, ConflictError, ContentTooLargeError, ForbiddenError, UnsupportedMediaTypeError
+from .errors import (
+    BadRequestError,
+    ConflictError,
+    ContentTooLargeError,
+    ForbiddenError,
+    NotFoundError,
+    UnsupportedMediaTypeError,
+)
 from . import datasets, documents, metadata, models, segments, tags
 from .datasets import (
     CreateDatasetRequestIndexingTechnique,
@@ -70,6 +89,7 @@
     CreateDocumentByTextRequestDocForm,
     CreateDocumentByTextRequestIndexingTechnique,
     CreateDocumentByTextResponse,
+    GetDocumentDetailRequestMetadata,
     GetDocumentIndexingStatusResponse,
     GetDocumentIndexingStatusResponseDataItem,
     GetDocumentListResponse,
@@ -132,6 +152,18 @@
     "DatasetRetrievalModelDict",
     "DatasetRetrievalModelDictRerankingModel",
     "Document",
+    "DocumentDetail",
+    "DocumentDetailDataSourceInfo",
+    "DocumentDetailDatasetProcessRule",
+    "DocumentDetailDatasetProcessRuleRules",
+    "DocumentDetailDatasetProcessRuleRulesPreProcessingRulesItem",
+    "DocumentDetailDatasetProcessRuleRulesSegmentation",
+    "DocumentDetailDatasetProcessRuleRulesSubchunkSegmentation",
+    "DocumentDetailDocumentProcessRule",
+    "DocumentDetailDocumentProcessRuleRules",
+    "DocumentDetailDocumentProcessRuleRulesPreProcessingRulesItem",
+    "DocumentDetailDocumentProcessRuleRulesSegmentation",
+    "DocumentDetailDocumentProcessRuleRulesSubchunkSegmentation",
     "DocumentDisplayStatus",
     "DocumentSegment",
     "DocumentSegmentStatus",
@@ -147,6 +179,7 @@
     "GetDatasetTagsResponse",
     "GetDatasetTagsResponseDataItem",
     "GetDatasetsDatasetIdDocumentsDocumentIdSegmentsSegmentIdChildChunksResponse",
+    "GetDocumentDetailRequestMetadata",
     "GetDocumentIndexingStatusResponse",
     "GetDocumentIndexingStatusResponseDataItem",
     "GetDocumentListResponse",
@@ -162,6 +195,7 @@
     "MetadataConditionValue",
     "MetadataFilteringConditions",
     "MetadataFilteringConditionsLogicalOperator",
+    "NotFoundError",
     "PatchDatasetsDatasetIdDocumentsDocumentIdSegmentsSegmentIdChildChunksChildChunkIdResponse",
     "PatchDatasetsDatasetIdRequestIndexingTechnique",
     "PatchDatasetsDatasetIdRequestPermission",
diff --git a/src/dify_sdk/knowledge_base/documents/__init__.py b/src/dify_sdk/knowledge_base/documents/__init__.py
index 7c8aeb7..03774b6 100644
--- a/src/dify_sdk/knowledge_base/documents/__init__.py
+++ b/src/dify_sdk/knowledge_base/documents/__init__.py
@@ -9,6 +9,7 @@
     CreateDocumentByTextRequestDocForm,
     CreateDocumentByTextRequestIndexingTechnique,
     CreateDocumentByTextResponse,
+    GetDocumentDetailRequestMetadata,
     GetDocumentIndexingStatusResponse,
     GetDocumentIndexingStatusResponseDataItem,
     GetDocumentListResponse,
@@ -25,6 +26,7 @@
     "CreateDocumentByTextRequestDocForm",
     "CreateDocumentByTextRequestIndexingTechnique",
     "CreateDocumentByTextResponse",
+    "GetDocumentDetailRequestMetadata",
     "GetDocumentIndexingStatusResponse",
     "GetDocumentIndexingStatusResponseDataItem",
     "GetDocumentListResponse",
diff --git a/src/dify_sdk/knowledge_base/documents/client.py b/src/dify_sdk/knowledge_base/documents/client.py
index 9993748..63d8185 100644
--- a/src/dify_sdk/knowledge_base/documents/client.py
+++ b/src/dify_sdk/knowledge_base/documents/client.py
@@ -5,6 +5,7 @@
 from ... import core
 from ...core.client_wrapper import AsyncClientWrapper, SyncClientWrapper
 from ...core.request_options import RequestOptions
+from ..types.document_detail import DocumentDetail
 from ..types.process_rule import ProcessRule
 from ..types.upload_file import UploadFile
 from .raw_client import AsyncRawDocumentsClient, RawDocumentsClient
@@ -14,6 +15,7 @@
 from .types.create_document_by_text_request_doc_form import CreateDocumentByTextRequestDocForm
 from .types.create_document_by_text_request_indexing_technique import CreateDocumentByTextRequestIndexingTechnique
 from .types.create_document_by_text_response import CreateDocumentByTextResponse
+from .types.get_document_detail_request_metadata import GetDocumentDetailRequestMetadata
 from .types.get_document_indexing_status_response import GetDocumentIndexingStatusResponse
 from .types.get_document_list_response import GetDocumentListResponse
 from .types.update_document_by_file_response import UpdateDocumentByFileResponse
@@ -338,6 +340,47 @@ def get_document_list(
         )
         return _response.data
 
+    def get_document_detail(
+        self,
+        dataset_id: str,
+        document_id: str,
+        *,
+        metadata: typing.Optional[GetDocumentDetailRequestMetadata] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> DocumentDetail:
+        """
+        Get document detail
+
+        Parameters
+        ----------
+        dataset_id : str
+            Knowledge Base ID
+
+        document_id : str
+            Document ID
+
+        metadata : typing.Optional[GetDocumentDetailRequestMetadata]
+            metadata filter condition
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        DocumentDetail
+            Successfully retrieved document detail
+
+        Examples
+        --------
+        from dify import DifyApi
+
+        client = DifyApi(token="YOUR_TOKEN", )
+        client.knowledge_base.documents.get_document_detail(dataset_id='dataset_id', document_id='document_id', )
+        """
+        _response = self._raw_client.get_document_detail(
+            dataset_id, document_id, metadata=metadata, request_options=request_options
+        )
+        return _response.data
+
     def delete_document(
         self, dataset_id: str, document_id: str, *, request_options: typing.Optional[RequestOptions] = None
     ) -> None:
@@ -774,6 +817,50 @@ async def main() -> None:
         )
         return _response.data
 
+    async def get_document_detail(
+        self,
+        dataset_id: str,
+        document_id: str,
+        *,
+        metadata: typing.Optional[GetDocumentDetailRequestMetadata] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> DocumentDetail:
+        """
+        Get document detail
+
+        Parameters
+        ----------
+        dataset_id : str
+            Knowledge Base ID
+
+        document_id : str
+            Document ID
+
+        metadata : typing.Optional[GetDocumentDetailRequestMetadata]
+            metadata filter condition
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        DocumentDetail
+            Successfully retrieved document detail
+
+        Examples
+        --------
+        from dify import AsyncDifyApi
+        import asyncio
+
+        client = AsyncDifyApi(token="YOUR_TOKEN", )
+
+        async def main() -> None:
+            await client.knowledge_base.documents.get_document_detail(dataset_id='dataset_id', document_id='document_id', )
+
+        asyncio.run(main())
+        """
+        _response = await self._raw_client.get_document_detail(
+            dataset_id, document_id, metadata=metadata, request_options=request_options
+        )
+        return _response.data
+
     async def delete_document(
         self, dataset_id: str, document_id: str, *, request_options: typing.Optional[RequestOptions] = None
     ) -> None:
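The generated client surfaces the new endpoint as `get_document_detail` on the documents client. A short usage sketch, lifted from the generated docstring with an added metadata filter (one of the documented "all" / "only" / "without" values); the token and both IDs are placeholders:

```python
from dify import DifyApi  # import path as shown in the generated docstrings

client = DifyApi(token="YOUR_TOKEN")

# Fetch one document from a knowledge base, omitting its metadata.
detail = client.knowledge_base.documents.get_document_detail(
    dataset_id="dataset_id",
    document_id="document_id",
    metadata="without",  # mirrors GetDocumentDetailRequestMetadata
)
print(detail.name, detail.indexing_status)
```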
diff --git a/src/dify_sdk/knowledge_base/documents/raw_client.py b/src/dify_sdk/knowledge_base/documents/raw_client.py
index 8af8cef..f3f11b7 100644
--- a/src/dify_sdk/knowledge_base/documents/raw_client.py
+++ b/src/dify_sdk/knowledge_base/documents/raw_client.py
@@ -14,7 +14,9 @@
 from ..errors.bad_request_error import BadRequestError
 from ..errors.content_too_large_error import ContentTooLargeError
 from ..errors.forbidden_error import ForbiddenError
+from ..errors.not_found_error import NotFoundError
 from ..errors.unsupported_media_type_error import UnsupportedMediaTypeError
+from ..types.document_detail import DocumentDetail
 from ..types.error import Error
 from ..types.process_rule import ProcessRule
 from ..types.upload_file import UploadFile
@@ -24,6 +26,7 @@
 from .types.create_document_by_text_request_doc_form import CreateDocumentByTextRequestDocForm
 from .types.create_document_by_text_request_indexing_technique import CreateDocumentByTextRequestIndexingTechnique
 from .types.create_document_by_text_response import CreateDocumentByTextResponse
+from .types.get_document_detail_request_metadata import GetDocumentDetailRequestMetadata
 from .types.get_document_indexing_status_response import GetDocumentIndexingStatusResponse
 from .types.get_document_list_response import GetDocumentListResponse
 from .types.update_document_by_file_response import UpdateDocumentByFileResponse
@@ -581,6 +584,89 @@ def get_document_list(
             raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text)
         raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json)
 
+    def get_document_detail(
+        self,
+        dataset_id: str,
+        document_id: str,
+        *,
+        metadata: typing.Optional[GetDocumentDetailRequestMetadata] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> HttpResponse[DocumentDetail]:
+        """
+        Get document detail
+
+        Parameters
+        ----------
+        dataset_id : str
+            Knowledge Base ID
+
+        document_id : str
+            Document ID
+
+        metadata : typing.Optional[GetDocumentDetailRequestMetadata]
+            metadata filter condition
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        HttpResponse[DocumentDetail]
+            Successfully retrieved document detail
+        """
+        _response = self._client_wrapper.httpx_client.request(
+            f"datasets/{jsonable_encoder(dataset_id)}/documents/{jsonable_encoder(document_id)}",
+            method="GET",
+            params={
+                "metadata": metadata,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                _data = typing.cast(
+                    DocumentDetail,
+                    parse_obj_as(
+                        type_=DocumentDetail,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+                return HttpResponse(response=_response, data=_data)
+            if _response.status_code == 400:
+                raise BadRequestError(
+                    typing.cast(
+                        Error,
+                        parse_obj_as(
+                            type_=Error,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            if _response.status_code == 403:
+                raise ForbiddenError(
+                    typing.cast(
+                        Error,
+                        parse_obj_as(
+                            type_=Error,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            if _response.status_code == 404:
+                raise NotFoundError(
+                    typing.cast(
+                        typing.Optional[typing.Any],
+                        parse_obj_as(
+                            type_=typing.Optional[typing.Any],  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text)
+        raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json)
+
     def delete_document(
         self, dataset_id: str, document_id: str, *, request_options: typing.Optional[RequestOptions] = None
     ) -> HttpResponse[None]:
@@ -1321,6 +1407,89 @@ async def get_document_list(
             raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text)
         raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json)
 
+    async def get_document_detail(
+        self,
+        dataset_id: str,
+        document_id: str,
+        *,
+        metadata: typing.Optional[GetDocumentDetailRequestMetadata] = None,
+        request_options: typing.Optional[RequestOptions] = None,
+    ) -> AsyncHttpResponse[DocumentDetail]:
+        """
+        Get document detail
+
+        Parameters
+        ----------
+        dataset_id : str
+            Knowledge Base ID
+
+        document_id : str
+            Document ID
+
+        metadata : typing.Optional[GetDocumentDetailRequestMetadata]
+            metadata filter condition
+
+        request_options : typing.Optional[RequestOptions]
+            Request-specific configuration.
+
+        Returns
+        -------
+        AsyncHttpResponse[DocumentDetail]
+            Successfully retrieved document detail
+        """
+        _response = await self._client_wrapper.httpx_client.request(
+            f"datasets/{jsonable_encoder(dataset_id)}/documents/{jsonable_encoder(document_id)}",
+            method="GET",
+            params={
+                "metadata": metadata,
+            },
+            request_options=request_options,
+        )
+        try:
+            if 200 <= _response.status_code < 300:
+                _data = typing.cast(
+                    DocumentDetail,
+                    parse_obj_as(
+                        type_=DocumentDetail,  # type: ignore
+                        object_=_response.json(),
+                    ),
+                )
+                return AsyncHttpResponse(response=_response, data=_data)
+            if _response.status_code == 400:
+                raise BadRequestError(
+                    typing.cast(
+                        Error,
+                        parse_obj_as(
+                            type_=Error,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            if _response.status_code == 403:
+                raise ForbiddenError(
+                    typing.cast(
+                        Error,
+                        parse_obj_as(
+                            type_=Error,  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            if _response.status_code == 404:
+                raise NotFoundError(
+                    typing.cast(
+                        typing.Optional[typing.Any],
+                        parse_obj_as(
+                            type_=typing.Optional[typing.Any],  # type: ignore
+                            object_=_response.json(),
+                        ),
+                    )
+                )
+            _response_json = _response.json()
+        except JSONDecodeError:
+            raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response.text)
+        raise ApiError(headers=dict(_response.headers), status_code=_response.status_code, body=_response_json)
+
     async def delete_document(
         self, dataset_id: str, document_id: str, *, request_options: typing.Optional[RequestOptions] = None
     ) -> AsyncHttpResponse[None]:
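At the raw-client level a 404 is mapped to the new NotFoundError before falling through to the generic ApiError. A hedged async sketch of catching it; the NotFoundError import path is an assumption based on the module layout above, and `exc.body` assumes Fern's ApiError keeps the parsed response body:

```python
import asyncio

from dify import AsyncDifyApi  # import path as shown in the generated docstrings
from dify_sdk.knowledge_base.errors import NotFoundError  # assumption: importable from this path

client = AsyncDifyApi(token="YOUR_TOKEN")


async def main() -> None:
    try:
        detail = await client.knowledge_base.documents.get_document_detail(
            dataset_id="dataset_id",
            document_id="document_id",
        )
        print(detail.display_status)
    except NotFoundError as exc:
        # Raised for the 404 responses documented by the Error404 schema
        # (document_not_found / dataset_not_found).
        print("Not found:", exc.body)


asyncio.run(main())
```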
diff --git a/src/dify_sdk/knowledge_base/documents/types/__init__.py b/src/dify_sdk/knowledge_base/documents/types/__init__.py
index 52a4a17..029072b 100644
--- a/src/dify_sdk/knowledge_base/documents/types/__init__.py
+++ b/src/dify_sdk/knowledge_base/documents/types/__init__.py
@@ -8,6 +8,7 @@
 from .create_document_by_text_request_doc_form import CreateDocumentByTextRequestDocForm
 from .create_document_by_text_request_indexing_technique import CreateDocumentByTextRequestIndexingTechnique
 from .create_document_by_text_response import CreateDocumentByTextResponse
+from .get_document_detail_request_metadata import GetDocumentDetailRequestMetadata
 from .get_document_indexing_status_response import GetDocumentIndexingStatusResponse
 from .get_document_indexing_status_response_data_item import GetDocumentIndexingStatusResponseDataItem
 from .get_document_list_response import GetDocumentListResponse
@@ -23,6 +24,7 @@
     "CreateDocumentByTextRequestDocForm",
     "CreateDocumentByTextRequestIndexingTechnique",
     "CreateDocumentByTextResponse",
+    "GetDocumentDetailRequestMetadata",
     "GetDocumentIndexingStatusResponse",
     "GetDocumentIndexingStatusResponseDataItem",
     "GetDocumentListResponse",
diff --git a/src/dify_sdk/knowledge_base/documents/types/get_document_detail_request_metadata.py b/src/dify_sdk/knowledge_base/documents/types/get_document_detail_request_metadata.py
new file mode 100644
index 0000000..29e9e36
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/documents/types/get_document_detail_request_metadata.py
@@ -0,0 +1,5 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+GetDocumentDetailRequestMetadata = typing.Union[typing.Literal["all", "only", "without"], typing.Any]
diff --git a/src/dify_sdk/knowledge_base/errors/__init__.py b/src/dify_sdk/knowledge_base/errors/__init__.py
index 9a5c3be..8e1c581 100644
--- a/src/dify_sdk/knowledge_base/errors/__init__.py
+++ b/src/dify_sdk/knowledge_base/errors/__init__.py
@@ -6,6 +6,14 @@
 from .conflict_error import ConflictError
 from .content_too_large_error import ContentTooLargeError
 from .forbidden_error import ForbiddenError
+from .not_found_error import NotFoundError
 from .unsupported_media_type_error import UnsupportedMediaTypeError
 
-__all__ = ["BadRequestError", "ConflictError", "ContentTooLargeError", "ForbiddenError", "UnsupportedMediaTypeError"]
+__all__ = [
+    "BadRequestError",
+    "ConflictError",
+    "ContentTooLargeError",
+    "ForbiddenError",
+    "NotFoundError",
+    "UnsupportedMediaTypeError",
+]
diff --git a/src/dify_sdk/knowledge_base/errors/not_found_error.py b/src/dify_sdk/knowledge_base/errors/not_found_error.py
new file mode 100644
index 0000000..dfc0fdf
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/errors/not_found_error.py
@@ -0,0 +1,10 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+from ...core.api_error import ApiError
+
+
+class NotFoundError(ApiError):
+    def __init__(self, body: typing.Optional[typing.Any]):
+        super().__init__(status_code=404, body=body)
diff --git a/src/dify_sdk/knowledge_base/types/__init__.py b/src/dify_sdk/knowledge_base/types/__init__.py
index 62c7aa5..b98575d 100644
--- a/src/dify_sdk/knowledge_base/types/__init__.py
+++ b/src/dify_sdk/knowledge_base/types/__init__.py
@@ -16,6 +16,26 @@
 from .dataset_retrieval_model_dict import DatasetRetrievalModelDict
 from .dataset_retrieval_model_dict_reranking_model import DatasetRetrievalModelDictRerankingModel
 from .document import Document
+from .document_detail import DocumentDetail
+from .document_detail_data_source_info import DocumentDetailDataSourceInfo
+from .document_detail_dataset_process_rule import DocumentDetailDatasetProcessRule
+from .document_detail_dataset_process_rule_rules import DocumentDetailDatasetProcessRuleRules
+from .document_detail_dataset_process_rule_rules_pre_processing_rules_item import (
+    DocumentDetailDatasetProcessRuleRulesPreProcessingRulesItem,
+)
+from .document_detail_dataset_process_rule_rules_segmentation import DocumentDetailDatasetProcessRuleRulesSegmentation
+from .document_detail_dataset_process_rule_rules_subchunk_segmentation import (
+    DocumentDetailDatasetProcessRuleRulesSubchunkSegmentation,
+)
+from .document_detail_document_process_rule import DocumentDetailDocumentProcessRule
+from .document_detail_document_process_rule_rules import DocumentDetailDocumentProcessRuleRules
+from .document_detail_document_process_rule_rules_pre_processing_rules_item import (
+    DocumentDetailDocumentProcessRuleRulesPreProcessingRulesItem,
+)
+from .document_detail_document_process_rule_rules_segmentation import DocumentDetailDocumentProcessRuleRulesSegmentation
+from .document_detail_document_process_rule_rules_subchunk_segmentation import (
+    DocumentDetailDocumentProcessRuleRulesSubchunkSegmentation,
+)
 from .document_display_status import DocumentDisplayStatus
 from .document_segment import DocumentSegment
 from .document_segment_status import DocumentSegmentStatus
@@ -63,6 +83,18 @@
     "DatasetRetrievalModelDict",
     "DatasetRetrievalModelDictRerankingModel",
     "Document",
+    "DocumentDetail",
+    "DocumentDetailDataSourceInfo",
+    "DocumentDetailDatasetProcessRule",
+    "DocumentDetailDatasetProcessRuleRules",
+    "DocumentDetailDatasetProcessRuleRulesPreProcessingRulesItem",
+    "DocumentDetailDatasetProcessRuleRulesSegmentation",
+    "DocumentDetailDatasetProcessRuleRulesSubchunkSegmentation",
+    "DocumentDetailDocumentProcessRule",
+    "DocumentDetailDocumentProcessRuleRules",
+    "DocumentDetailDocumentProcessRuleRulesPreProcessingRulesItem",
+    "DocumentDetailDocumentProcessRuleRulesSegmentation",
+    "DocumentDetailDocumentProcessRuleRulesSubchunkSegmentation",
     "DocumentDisplayStatus",
     "DocumentSegment",
     "DocumentSegmentStatus",
diff --git a/src/dify_sdk/knowledge_base/types/document_detail.py b/src/dify_sdk/knowledge_base/types/document_detail.py
new file mode 100644
index 0000000..fb4bc2b
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail.py
@@ -0,0 +1,155 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .document_detail_data_source_info import DocumentDetailDataSourceInfo
+from .document_detail_dataset_process_rule import DocumentDetailDatasetProcessRule
+from .document_detail_document_process_rule import DocumentDetailDocumentProcessRule
+
+
+class DocumentDetail(UniversalBaseModel):
+    id: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Document ID
+    """
+
+    position: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Document position
+    """
+
+    data_source_type: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Data source type
+    """
+
+    data_source_info: typing.Optional[DocumentDetailDataSourceInfo] = pydantic.Field(default=None)
+    """
+    Data source information
+    """
+
+    dataset_process_rule_id: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Dataset process rule ID
+    """
+
+    dataset_process_rule: typing.Optional[DocumentDetailDatasetProcessRule] = pydantic.Field(default=None)
+    """
+    Dataset process rule
+    """
+
+    document_process_rule: typing.Optional[DocumentDetailDocumentProcessRule] = pydantic.Field(default=None)
+    """
+    Document process rule
+    """
+
+    name: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Document name
+    """
+
+    created_from: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Creation source
+    """
+
+    created_by: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Creator
+    """
+
+    created_at: typing.Optional[float] = pydantic.Field(default=None)
+    """
+    Creation timestamp
+    """
+
+    tokens: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Token count
+    """
+
+    indexing_status: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Indexing status
+    """
+
+    completed_at: typing.Optional[float] = pydantic.Field(default=None)
+    """
+    Completion timestamp
+    """
+
+    updated_at: typing.Optional[float] = pydantic.Field(default=None)
+    """
+    Update timestamp
+    """
+
+    indexing_latency: typing.Optional[float] = pydantic.Field(default=None)
+    """
+    Indexing latency
+    """
+
+    error: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Error message
+    """
+
+    enabled: typing.Optional[bool] = pydantic.Field(default=None)
+    """
+    Whether enabled
+    """
+
+    disabled_at: typing.Optional[float] = pydantic.Field(default=None)
+    """
+    Disabled timestamp
+    """
+
+    disabled_by: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Disabled by
+    """
+
+    archived: typing.Optional[bool] = pydantic.Field(default=None)
+    """
+    Whether archived
+    """
+
+    segment_count: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Segment count
+    """
+
+    average_segment_length: typing.Optional[float] = pydantic.Field(default=None)
+    """
+    Average segment length
+    """
+
+    hit_count: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Hit count
+    """
+
+    display_status: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Display status
+    """
+
+    doc_form: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Document form
+    """
+
+    doc_language: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Document language
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_data_source_info.py b/src/dify_sdk/knowledge_base/types/document_detail_data_source_info.py
new file mode 100644
index 0000000..705b70d
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_data_source_info.py
@@ -0,0 +1,26 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class DocumentDetailDataSourceInfo(UniversalBaseModel):
+    """
+    Data source information
+    """
+
+    upload_file: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = pydantic.Field(default=None)
+    """
+    Upload file information
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule.py b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule.py
new file mode 100644
index 0000000..83f13a7
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule.py
@@ -0,0 +1,32 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .document_detail_dataset_process_rule_rules import DocumentDetailDatasetProcessRuleRules
+
+
+class DocumentDetailDatasetProcessRule(UniversalBaseModel):
+    """
+    Dataset process rule
+    """
+
+    mode: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Process mode
+    """
+
+    rules: typing.Optional[DocumentDetailDatasetProcessRuleRules] = pydantic.Field(default=None)
+    """
+    Rule details
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules.py b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules.py
new file mode 100644
index 0000000..7fc42a8
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules.py
@@ -0,0 +1,52 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .document_detail_dataset_process_rule_rules_pre_processing_rules_item import (
+    DocumentDetailDatasetProcessRuleRulesPreProcessingRulesItem,
+)
+from .document_detail_dataset_process_rule_rules_segmentation import DocumentDetailDatasetProcessRuleRulesSegmentation
+from .document_detail_dataset_process_rule_rules_subchunk_segmentation import (
+    DocumentDetailDatasetProcessRuleRulesSubchunkSegmentation,
+)
+
+
+class DocumentDetailDatasetProcessRuleRules(UniversalBaseModel):
+    """
+    Rule details
+    """
+
+    pre_processing_rules: typing.Optional[typing.List[DocumentDetailDatasetProcessRuleRulesPreProcessingRulesItem]] = (
+        pydantic.Field(default=None)
+    )
+    """
+    Pre-processing rules list
+    """
+
+    segmentation: typing.Optional[DocumentDetailDatasetProcessRuleRulesSegmentation] = pydantic.Field(default=None)
+    """
+    Segmentation rules
+    """
+
+    parent_mode: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Parent mode
+    """
+
+    subchunk_segmentation: typing.Optional[DocumentDetailDatasetProcessRuleRulesSubchunkSegmentation] = pydantic.Field(
+        default=None
+    )
+    """
+    Sub-chunk segmentation
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_pre_processing_rules_item.py b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_pre_processing_rules_item.py
new file mode 100644
index 0000000..4a508bf
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_pre_processing_rules_item.py
@@ -0,0 +1,27 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class DocumentDetailDatasetProcessRuleRulesPreProcessingRulesItem(UniversalBaseModel):
+    id: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Rule ID
+    """
+
+    enabled: typing.Optional[bool] = pydantic.Field(default=None)
+    """
+    Whether enabled
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_segmentation.py b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_segmentation.py
new file mode 100644
index 0000000..2168d23
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_segmentation.py
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class DocumentDetailDatasetProcessRuleRulesSegmentation(UniversalBaseModel):
+    """
+    Segmentation rules
+    """
+
+    separator: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Separator
+    """
+
+    max_tokens: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Maximum tokens
+    """
+
+    chunk_overlap: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Chunk overlap
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_subchunk_segmentation.py b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_subchunk_segmentation.py
new file mode 100644
index 0000000..3c8861a
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_dataset_process_rule_rules_subchunk_segmentation.py
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class DocumentDetailDatasetProcessRuleRulesSubchunkSegmentation(UniversalBaseModel):
+    """
+    Sub-chunk segmentation
+    """
+
+    separator: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Separator
+    """
+
+    max_tokens: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Maximum tokens
+    """
+
+    chunk_overlap: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Chunk overlap
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule.py b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule.py
new file mode 100644
index 0000000..546078e
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule.py
@@ -0,0 +1,42 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .document_detail_document_process_rule_rules import DocumentDetailDocumentProcessRuleRules
+
+
+class DocumentDetailDocumentProcessRule(UniversalBaseModel):
+    """
+    Document process rule
+    """
+
+    id: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Rule ID
+    """
+
+    dataset_id: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Dataset ID
+    """
+
+    mode: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Process mode
+    """
+
+    rules: typing.Optional[DocumentDetailDocumentProcessRuleRules] = pydantic.Field(default=None)
+    """
+    Rule details
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules.py b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules.py
new file mode 100644
index 0000000..75eb4ed
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules.py
@@ -0,0 +1,52 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+from .document_detail_document_process_rule_rules_pre_processing_rules_item import (
+    DocumentDetailDocumentProcessRuleRulesPreProcessingRulesItem,
+)
+from .document_detail_document_process_rule_rules_segmentation import DocumentDetailDocumentProcessRuleRulesSegmentation
+from .document_detail_document_process_rule_rules_subchunk_segmentation import (
+    DocumentDetailDocumentProcessRuleRulesSubchunkSegmentation,
+)
+
+
+class DocumentDetailDocumentProcessRuleRules(UniversalBaseModel):
+    """
+    Rule details
+    """
+
+    pre_processing_rules: typing.Optional[typing.List[DocumentDetailDocumentProcessRuleRulesPreProcessingRulesItem]] = (
+        pydantic.Field(default=None)
+    )
+    """
+    Pre-processing rules list
+    """
+
+    segmentation: typing.Optional[DocumentDetailDocumentProcessRuleRulesSegmentation] = pydantic.Field(default=None)
+    """
+    Segmentation rules
+    """
+
+    parent_mode: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Parent mode
+    """
+
+    subchunk_segmentation: typing.Optional[DocumentDetailDocumentProcessRuleRulesSubchunkSegmentation] = pydantic.Field(
+        default=None
+    )
+    """
+    Sub-chunk segmentation
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_pre_processing_rules_item.py b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_pre_processing_rules_item.py
new file mode 100644
index 0000000..c958aea
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_pre_processing_rules_item.py
@@ -0,0 +1,27 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class DocumentDetailDocumentProcessRuleRulesPreProcessingRulesItem(UniversalBaseModel):
+    id: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Rule ID
+    """
+
+    enabled: typing.Optional[bool] = pydantic.Field(default=None)
+    """
+    Whether enabled
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_segmentation.py b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_segmentation.py
new file mode 100644
index 0000000..afe86fb
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_segmentation.py
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class DocumentDetailDocumentProcessRuleRulesSegmentation(UniversalBaseModel):
+    """
+    Segmentation rules
+    """
+
+    separator: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Separator
+    """
+
+    max_tokens: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Maximum tokens
+    """
+
+    chunk_overlap: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Chunk overlap
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
diff --git a/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_subchunk_segmentation.py b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_subchunk_segmentation.py
new file mode 100644
index 0000000..b89c3a2
--- /dev/null
+++ b/src/dify_sdk/knowledge_base/types/document_detail_document_process_rule_rules_subchunk_segmentation.py
@@ -0,0 +1,36 @@
+# This file was auto-generated by Fern from our API Definition.
+
+import typing
+
+import pydantic
+from ...core.pydantic_utilities import IS_PYDANTIC_V2, UniversalBaseModel
+
+
+class DocumentDetailDocumentProcessRuleRulesSubchunkSegmentation(UniversalBaseModel):
+    """
+    Sub-chunk segmentation
+    """
+
+    separator: typing.Optional[str] = pydantic.Field(default=None)
+    """
+    Separator
+    """
+
+    max_tokens: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Maximum tokens
+    """
+
+    chunk_overlap: typing.Optional[int] = pydantic.Field(default=None)
+    """
+    Chunk overlap
+    """
+
+    if IS_PYDANTIC_V2:
+        model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True)  # type: ignore # Pydantic v2
+    else:
+
+        class Config:
+            frozen = True
+            smart_union = True
+            extra = pydantic.Extra.allow
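Every field on the DocumentDetail models above is Optional, so nested access needs None guards. A small sketch (not part of the generated patch) walking the nested process-rule models defined above; the token and IDs are placeholders:

```python
from dify import DifyApi  # import path as shown in the generated docstrings

client = DifyApi(token="YOUR_TOKEN")
detail = client.knowledge_base.documents.get_document_detail(
    dataset_id="dataset_id",
    document_id="document_id",
)

# Descend into dataset_process_rule -> rules -> segmentation, guarding
# each Optional hop before reading the leaf fields.
rule = detail.dataset_process_rule
if rule is not None and rule.rules is not None:
    seg = rule.rules.segmentation
    if seg is not None:
        print("separator:", seg.separator)
        print("max_tokens:", seg.max_tokens)
        print("chunk_overlap:", seg.chunk_overlap)
```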