From f24581051c13e88830d3ef625425a0ef0f3ad1f8 Mon Sep 17 00:00:00 2001 From: thomashebrard Date: Thu, 15 Jan 2026 13:19:37 +0100 Subject: [PATCH 1/2] bump to Chicago --- .env.example | 2 +- .pipelex/inference/backends.toml | 36 +- .pipelex/inference/backends/anthropic.toml | 28 +- .pipelex/inference/backends/azure_openai.toml | 98 ++++- .pipelex/inference/backends/bedrock.toml | 25 +- .pipelex/inference/backends/blackboxai.toml | 32 +- .pipelex/inference/backends/fal.toml | 50 +++ .pipelex/inference/backends/google.toml | 41 +- .pipelex/inference/backends/groq.toml | 16 +- .pipelex/inference/backends/huggingface.toml | 43 ++ .pipelex/inference/backends/internal.toml | 10 +- .pipelex/inference/backends/mistral.toml | 31 +- .pipelex/inference/backends/openai.toml | 134 +++--- .../inference/backends/pipelex_gateway.toml | 41 ++ .../inference/backends/pipelex_inference.toml | 55 ++- .pipelex/inference/backends/portkey.toml | 263 ++++++++++++ .pipelex/inference/backends/scaleway.toml | 67 +++ .pipelex/inference/deck/base_deck.toml | 65 ++- .pipelex/inference/routing_profiles.toml | 74 ++-- .pipelex/pipelex.toml | 146 +++++-- .pipelex/pipelex_service.toml | 19 + .pipelex/telemetry.toml | 95 ++++- CHANGELOG.md | 6 + Makefiles/Makefile.local.mk | 2 +- README.md | 35 +- api/routes/helpers.py | 2 +- api/routes/pipelex/pipe_builder.py | 2 +- api/security.py | 28 +- docker-compose.yml | 3 +- docs/index.md | 62 ++- pyproject.toml | 9 +- uv.lock | 403 +++++++----------- 32 files changed, 1405 insertions(+), 518 deletions(-) create mode 100644 .pipelex/inference/backends/huggingface.toml create mode 100644 .pipelex/inference/backends/pipelex_gateway.toml create mode 100644 .pipelex/inference/backends/portkey.toml create mode 100644 .pipelex/inference/backends/scaleway.toml create mode 100644 .pipelex/pipelex_service.toml diff --git a/.env.example b/.env.example index 43dcd75..0bb5543 100644 --- a/.env.example +++ b/.env.example @@ -1,2 +1,2 @@ API_KEY= -PIPELEX_INFERENCE_API_KEY= 
\ No newline at end of file +PIPELEX_GATEWAY_API_KEY= \ No newline at end of file diff --git a/.pipelex/inference/backends.toml b/.pipelex/inference/backends.toml index c87d1ef..360a73d 100644 --- a/.pipelex/inference/backends.toml +++ b/.pipelex/inference/backends.toml @@ -10,16 +10,14 @@ # #################################################################################################### -[pipelex_inference] -display_name = "⭐ Pipelex Inference" -enabled = true -endpoint = "https://inference.pipelex.com/v1" -api_key = "${PIPELEX_INFERENCE_API_KEY}" +[pipelex_gateway] +display_name = "⭐ Pipelex Gateway" +enabled = true # Enable after accepting terms via `pipelex init config` +api_key = "${PIPELEX_GATEWAY_API_KEY}" [anthropic] enabled = false api_key = "${ANTHROPIC_API_KEY}" -claude_4_tokens_limit = 8192 [azure_openai] display_name = "Azure OpenAI" @@ -55,6 +53,11 @@ enabled = false endpoint = "https://api.groq.com/openai/v1" api_key = "${GROQ_API_KEY}" +[huggingface] +display_name = "Hugging Face" +enabled = false +api_key = "${HF_TOKEN}" + [mistral] display_name = "Mistral AI" enabled = false @@ -69,9 +72,21 @@ display_name = "OpenAI" enabled = false api_key = "${OPENAI_API_KEY}" +[portkey] +display_name = "Portkey" +enabled = false +endpoint = "https://api.portkey.ai/v1" +api_key = "${PORTKEY_API_KEY}" + +[scaleway] +display_name = "Scaleway" +enabled = false +endpoint = "${SCALEWAY_ENDPOINT}" +api_key = "${SCALEWAY_API_KEY}" + [vertexai] display_name = "Google Vertex AI" -enabled = false +enabled = false # This is the only one we disable beacuse setting it up requires internet access just to get credentials so it fails in CI sandboxes gcp_project_id = "${GCP_PROJECT_ID}" gcp_location = "${GCP_LOCATION}" gcp_credentials_file_path = "${GCP_CREDENTIALS_FILE_PATH}" @@ -84,3 +99,10 @@ api_key = "${XAI_API_KEY}" [internal] # software-only backend, runs internally, without AI enabled = true + +# Deprecated +[pipelex_inference] +display_name = "🛑 Legacy Pipelex 
Inference" +enabled = false +endpoint = "https://inference.pipelex.com/v1" +api_key = "${PIPELEX_INFERENCE_API_KEY}" diff --git a/.pipelex/inference/backends/anthropic.toml b/.pipelex/inference/backends/anthropic.toml index 6b9c912..2c94cd3 100644 --- a/.pipelex/inference/backends/anthropic.toml +++ b/.pipelex/inference/backends/anthropic.toml @@ -39,19 +39,11 @@ outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 0.25, output = 1.25 } -[claude-3-opus] -model_id = "claude-3-opus-20240229" -max_tokens = 4096 -inputs = ["text", "images"] -outputs = ["text", "structured"] -max_prompt_images = 100 -costs = { input = 15.0, output = 75.0 } - # --- Claude 3.7 Series -------------------------------------------------------- ["claude-3.7-sonnet"] model_id = "claude-3-7-sonnet-20250219" max_tokens = 8192 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -60,7 +52,7 @@ costs = { input = 3.0, output = 15.0 } [claude-4-sonnet] model_id = "claude-sonnet-4-20250514" max_tokens = 64000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -68,7 +60,7 @@ costs = { input = 3.0, output = 15.0 } [claude-4-opus] model_id = "claude-opus-4-20250514" max_tokens = 32000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -77,7 +69,7 @@ costs = { input = 3.0, output = 15.0 } ["claude-4.1-opus"] model_id = "claude-opus-4-1-20250805" max_tokens = 32000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -86,7 +78,7 @@ costs = { input = 3.0, output = 15.0 } ["claude-4.5-sonnet"] model_id = "claude-sonnet-4-5-20250929" max_tokens = 
64000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -94,7 +86,15 @@ costs = { input = 3.0, output = 15.0 } ["claude-4.5-haiku"] model_id = "claude-haiku-4-5-20251001" max_tokens = 64000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 1.0, output = 5.0 } + +["claude-4.5-opus"] +model_id = "claude-opus-4-5-20251101" +max_tokens = 64000 +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 100 +costs = { input = 5.0, output = 25.0 } diff --git a/.pipelex/inference/backends/azure_openai.toml b/.pipelex/inference/backends/azure_openai.toml index 6054b12..8a89898 100644 --- a/.pipelex/inference/backends/azure_openai.toml +++ b/.pipelex/inference/backends/azure_openai.toml @@ -22,9 +22,9 @@ [defaults] model_type = "llm" -sdk = "azure_openai" +sdk = "azure_openai_responses" prompting_target = "openai" -structure_method = "instructor/openai_tools" +structure_method = "instructor/openai_responses_tools" ################################################################################ # LANGUAGE MODELS @@ -68,28 +68,28 @@ model_id = "o1-mini-2024-09-12" inputs = ["text"] outputs = ["text", "structured"] costs = { input = 3.0, output = 12.0 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [o1] model_id = "o1-2024-12-17" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 15.0, output = 60.0 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [o3-mini] model_id = "o3-mini-2025-01-31" inputs = ["text"] outputs = ["text", "structured"] costs = { input = 1.1, output = 4.4 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [o3] model_id = "o3-2025-04-16" inputs = ["text"] outputs = ["text", 
"structured"] costs = { input = 2, output = 8 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } # --- GPT-5 Series ------------------------------------------------------------- [gpt-5-mini] @@ -97,24 +97,64 @@ model_id = "gpt-5-mini-2025-08-07" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.25, output = 2.0 } +valued_constraints = { fixed_temperature = 1 } [gpt-5-nano] model_id = "gpt-5-nano-2025-08-07" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.05, output = 0.4 } +valued_constraints = { fixed_temperature = 1 } [gpt-5-chat] model_id = "gpt-5-chat-2025-08-07" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } [gpt-5] model_id = "gpt-5-2025-08-07" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } + +# --- GPT-5.1 Series ------------------------------------------------------------- +["gpt-5.1"] +model_id = "gpt-5.1-2025-11-13" +inputs = ["text", "images"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } + +["gpt-5.1-chat"] +model_id = "gpt-5.1-chat-2025-11-13" +inputs = ["text", "images"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } + +["gpt-5.1-codex"] +model_id = "gpt-5.1-codex-2025-11-13" +inputs = ["text", "images"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } + +# --- GPT-5.2 Series ------------------------------------------------------------- +["gpt-5.2"] +model_id = "gpt-5.2-2025-12-11" +inputs = ["text", "images"] +outputs = ["text", "structured"] +costs = { input = 1.75, output = 14.0 } + +["gpt-5.2-chat"] +model_id = 
"gpt-5.2-chat-2025-12-11" +inputs = ["text", "images"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } ################################################################################ # IMAGE GENERATION MODELS @@ -122,8 +162,52 @@ costs = { input = 1.25, output = 10.0 } # --- OpenAI Image Generation -------------------------------------------------- [gpt-image-1] +sdk = "azure_rest_img_gen" model_type = "img_gen" model_id = "gpt-image-1-2025-04-15" inputs = ["text"] outputs = ["image"] -costs = { input = 0.04, output = 0.0 } +costs = { input = 10, output = 40 } + +[gpt-image-1.rules] +prompt = "positive_only" +num_images = "gpt" +aspect_ratio = "gpt" +background = "gpt" +inference = "gpt" +safety_checker = "unavailable" +output_format = "gpt" + +[gpt-image-1-mini] +sdk = "azure_rest_img_gen" +model_type = "img_gen" +model_id = "gpt-image-1-mini-2025-10-06" +inputs = ["text"] +outputs = ["image"] +costs = { input = 2.5, output = 8 } + +[gpt-image-1-mini.rules] +prompt = "positive_only" +num_images = "gpt" +aspect_ratio = "gpt" +background = "gpt" +inference = "gpt" +safety_checker = "unavailable" +output_format = "gpt" + +["gpt-image-1.5"] +sdk = "azure_rest_img_gen" +model_type = "img_gen" +model_id = "gpt-image-1.5-2025-12-16" +inputs = ["text"] +outputs = ["image"] +costs = { input = 8, output = 32 } + +["gpt-image-1.5".rules] +prompt = "positive_only" +num_images = "gpt" +aspect_ratio = "gpt" +background = "gpt" +inference = "gpt" +safety_checker = "unavailable" +output_format = "gpt" diff --git a/.pipelex/inference/backends/bedrock.toml b/.pipelex/inference/backends/bedrock.toml index cea9134..dc3a566 100644 --- a/.pipelex/inference/backends/bedrock.toml +++ b/.pipelex/inference/backends/bedrock.toml @@ -60,7 +60,7 @@ costs = { input = 3.0, output = 15.0 } sdk = "bedrock_anthropic" model_id = "us.anthropic.claude-3-7-sonnet-20250219-v1:0" max_tokens = 8192 -inputs = ["text", "images"] 
+inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -69,7 +69,7 @@ costs = { input = 3.0, output = 15.0 } sdk = "bedrock_anthropic" model_id = "us.anthropic.claude-sonnet-4-20250514-v1:0" max_tokens = 64000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -78,7 +78,7 @@ costs = { input = 3.0, output = 15.0 } sdk = "bedrock_anthropic" model_id = "us.anthropic.claude-opus-4-20250514-v1:0" max_tokens = 32000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -87,7 +87,7 @@ costs = { input = 3.0, output = 15.0 } sdk = "bedrock_anthropic" model_id = "us.anthropic.claude-opus-4-1-20250805-v1:0" max_tokens = 32000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -95,8 +95,8 @@ costs = { input = 3.0, output = 15.0 } ["claude-4.5-sonnet"] sdk = "bedrock_anthropic" model_id = "us.anthropic.claude-sonnet-4-5-20250929-v1:0" -max_tokens = 8192 -inputs = ["text", "images"] +max_tokens = 64000 +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 3.0, output = 15.0 } @@ -104,8 +104,17 @@ costs = { input = 3.0, output = 15.0 } ["claude-4.5-haiku"] sdk = "bedrock_anthropic" model_id = "us.anthropic.claude-haiku-4-5-20251001-v1:0" -max_tokens = 8192 -inputs = ["text", "images"] +max_tokens = 64000 +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 100 costs = { input = 1.0, output = 5.0 } + +["claude-4.5-opus"] +sdk = "bedrock_anthropic" +model_id = "global.anthropic.claude-opus-4-5-20251101-v1:0" +max_tokens = 64000 +inputs = ["text", "images", "pdf"] +outputs = ["text", 
"structured"] +max_prompt_images = 100 +costs = { input = 5.0, output = 25.0 } diff --git a/.pipelex/inference/backends/blackboxai.toml b/.pipelex/inference/backends/blackboxai.toml index 405675b..ee8a105 100644 --- a/.pipelex/inference/backends/blackboxai.toml +++ b/.pipelex/inference/backends/blackboxai.toml @@ -73,7 +73,7 @@ inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 3.00, output = 15.00 } -[claude-opus-4] +[claude-4-opus] model_id = "blackboxai/anthropic/claude-opus-4" inputs = ["text", "images"] outputs = ["text", "structured"] @@ -104,12 +104,6 @@ inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 1.25, output = 10.00 } -["gemini-flash-1.5-8b"] -model_id = "blackboxai/google/gemini-flash-1.5-8b" -inputs = ["text", "images"] -outputs = ["text", "structured"] -costs = { input = 0.04, output = 0.15 } - # --- Mistral Models ----------------------------------------------------------- [mistral-large] model_id = "blackboxai/mistralai/mistral-large" @@ -167,13 +161,13 @@ costs = { input = 0.06, output = 0.24 } ################################################################################ # --- DeepSeek Free Models ----------------------------------------------------- -[deepseek-chat-free] +[deepseek-chat] model_id = "blackboxai/deepseek/deepseek-chat:free" inputs = ["text"] outputs = ["text", "structured"] costs = { input = 0.00, output = 0.00 } -[deepseek-r1-free] +[deepseek-r1] model_id = "blackboxai/deepseek/deepseek-r1:free" inputs = ["text"] outputs = ["text", "structured"] @@ -193,7 +187,7 @@ costs = { input = 0.00, output = 0.00 } [flux-pro] model_type = "img_gen" -sdk = "openai_alt_img_gen" +sdk = "blackboxai_img_gen" model_id = "blackboxai/black-forest-labs/flux-pro" inputs = ["text"] outputs = ["image"] @@ -201,7 +195,7 @@ costs = { input = 0.0, output = 0.04 } ["flux-pro/v1.1"] model_type = "img_gen" -sdk = "openai_alt_img_gen" +sdk = "blackboxai_img_gen" model_id = 
"blackboxai/black-forest-labs/flux-1.1-pro" inputs = ["text"] outputs = ["image"] @@ -209,15 +203,15 @@ costs = { input = 0.0, output = 0.04 } ["flux-pro/v1.1-ultra"] model_type = "img_gen" -sdk = "openai_alt_img_gen" +sdk = "blackboxai_img_gen" model_id = "blackboxai/black-forest-labs/flux-1.1-pro-ultra" inputs = ["text"] outputs = ["image"] costs = { input = 0.0, output = 0.06 } -["fast-lightning-sdxl"] +[fast-lightning-sdxl] model_type = "img_gen" -sdk = "openai_alt_img_gen" +sdk = "blackboxai_img_gen" model_id = "blackboxai/bytedance/sdxl-lightning-4step" inputs = ["text"] outputs = ["image"] @@ -225,8 +219,16 @@ costs = { input = 0.0, output = 0.0014 } [nano-banana] model_type = "img_gen" -sdk = "openai_alt_img_gen" +sdk = "blackboxai_img_gen" model_id = "blackboxai/google/nano-banana" inputs = ["text"] outputs = ["image"] costs = { input = 0.0, output = 0.039 } + +[nano-banana-pro] +model_type = "img_gen" +sdk = "blackboxai_img_gen" +model_id = "blackboxai/google/nano-banana-pro" +inputs = ["text"] +outputs = ["image"] +costs = { input = 0.0, output = 0.039 } diff --git a/.pipelex/inference/backends/fal.toml b/.pipelex/inference/backends/fal.toml index d54a25f..3433f99 100644 --- a/.pipelex/inference/backends/fal.toml +++ b/.pipelex/inference/backends/fal.toml @@ -36,18 +36,60 @@ inputs = ["text"] outputs = ["image"] costs = { input = 0.05, output = 0.0 } +[flux-pro.rules] +prompt = "positive_only" +num_images = "fal" +aspect_ratio = "flux" +inference = "flux" +safety_checker = "available" +output_format = "flux_1" +specific = "fal" + ["flux-pro/v1.1"] model_id = "fal-ai/flux-pro/v1.1" inputs = ["text"] outputs = ["image"] costs = { input = 0.05, output = 0.0 } +["flux-pro/v1.1".rules] +prompt = "positive_only" +num_images = "fal" +aspect_ratio = "flux" +inference = "flux" +safety_checker = "available" +output_format = "flux_1" +specific = "fal" + ["flux-pro/v1.1-ultra"] model_id = "fal-ai/flux-pro/v1.1-ultra" inputs = ["text"] outputs = ["image"] costs = { 
input = 0.06, output = 0.0 } +["flux-pro/v1.1-ultra".rules] +prompt = "positive_only" +num_images = "fal" +aspect_ratio = "flux_11_ultra" +inference = "flux_11_ultra" +safety_checker = "available" +output_format = "flux_1" +specific = "fal" + +[flux-2] +model_id = "fal-ai/flux-2" +inputs = ["text"] +outputs = ["image"] +costs = { input = 0.05, output = 0.0 } + +[flux-2.rules] +prompt = "positive_only" +num_images = "fal" +aspect_ratio = "flux" +inference = "flux" +safety_checker = "available" +output_format = "flux_2" +specific = "fal" + # --- SDXL models -------------------------------------------------------------- [fast-lightning-sdxl] model_id = "fal-ai/fast-lightning-sdxl" @@ -55,3 +97,11 @@ inputs = ["text"] outputs = ["image"] costs = { input = 0.0003, output = 0.0 } +[fast-lightning-sdxl.rules] +prompt = "positive_only" +num_images = "fal" +aspect_ratio = "flux" +inference = "sdxl_lightning" +safety_checker = "unavailable" +output_format = "sdxl" +specific = "fal" diff --git a/.pipelex/inference/backends/google.toml b/.pipelex/inference/backends/google.toml index ff0040a..de001aa 100644 --- a/.pipelex/inference/backends/google.toml +++ b/.pipelex/inference/backends/google.toml @@ -33,7 +33,7 @@ structure_method = "instructor/genai_tools" # --- Gemini 2.0 Series ---------------------------------------- ["gemini-2.0-flash"] model_id = "gemini-2.0-flash" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 3000 costs = { input = 0.10, output = 0.40 } @@ -41,21 +41,54 @@ costs = { input = 0.10, output = 0.40 } # --- Gemini 2.5 Series ---------------------------------------- ["gemini-2.5-pro"] model_id = "gemini-2.5-pro" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 3000 costs = { input = 1.25, output = 10.0 } ["gemini-2.5-flash"] model_id = "gemini-2.5-flash" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] 
outputs = ["text", "structured"] max_prompt_images = 3000 costs = { input = 0.30, output = 2.50 } ["gemini-2.5-flash-lite"] model_id = "gemini-2.5-flash-lite" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] max_prompt_images = 3000 costs = { input = 0.10, output = 0.40 } + +# --- Gemini 3.0 Series ---------------------------------------- +["gemini-3.0-pro"] +model_id = "gemini-3-pro-preview" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 3000 +costs = { input = 2, output = 12.0 } + +["gemini-3.0-flash-preview"] +model_id = "gemini-3-flash-preview" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 3000 +costs = { input = 0.5, output = 3.0 } + +################################################################################ +# IMAGE GENERATION MODELS (Nano Banana) +################################################################################ + +[nano-banana] +model_type = "img_gen" +model_id = "gemini-2.5-flash-image" +inputs = ["text"] +outputs = ["image"] +costs = { input = 0.0, output = 0.039 } + +[nano-banana-pro] +model_type = "img_gen" +model_id = "gemini-3-pro-image-preview" +inputs = ["text"] +outputs = ["image"] +costs = { input = 0.0, output = 0.039 } diff --git a/.pipelex/inference/backends/groq.toml b/.pipelex/inference/backends/groq.toml index 72fc0b8..72bdae3 100644 --- a/.pipelex/inference/backends/groq.toml +++ b/.pipelex/inference/backends/groq.toml @@ -46,7 +46,7 @@ outputs = ["text", "structured"] costs = { input = 0.59, output = 0.79 } # --- Meta Llama Guard --------------------------------------------------------- -["meta-llama/llama-guard-4-12b"] +[llama-guard-4-12b] model_id = "meta-llama/llama-guard-4-12b" max_tokens = 1024 inputs = ["text"] @@ -54,14 +54,14 @@ outputs = ["text", "structured"] costs = { input = 0.20, output = 0.20 } # --- OpenAI GPT-OSS Models 
---------------------------------------------------- -["openai/gpt-oss-20b"] +[gpt-oss-20b] model_id = "openai/gpt-oss-20b" max_tokens = 65536 inputs = ["text"] outputs = ["text", "structured"] costs = { input = 0.075, output = 0.30 } -["openai/gpt-oss-120b"] +[gpt-oss-120b] model_id = "openai/gpt-oss-120b" max_tokens = 65536 inputs = ["text"] @@ -88,7 +88,7 @@ costs = { input = 0.10, output = 0.30 } ################################################################################ # --- Meta Llama 4 Vision Models (Preview) ------------------------------------- -["meta-llama/llama-4-scout-17b-16e-instruct"] +[llama-4-scout-17b-16e-instruct] model_id = "meta-llama/llama-4-scout-17b-16e-instruct" max_tokens = 8192 inputs = ["text", "images"] @@ -96,7 +96,7 @@ outputs = ["text", "structured"] max_prompt_images = 5 costs = { input = 0.11, output = 0.34 } -["meta-llama/llama-4-maverick-17b-128e-instruct"] +[llama-4-maverick-17b-128e-instruct] model_id = "meta-llama/llama-4-maverick-17b-128e-instruct" max_tokens = 8192 inputs = ["text", "images"] @@ -105,7 +105,7 @@ max_prompt_images = 5 costs = { input = 0.20, output = 0.60 } # --- Moonshot Kimi K2 --------------------------------------------------------- -["moonshotai/kimi-k2-instruct-0905"] +[kimi-k2-instruct-0905] model_id = "moonshotai/kimi-k2-instruct-0905" max_tokens = 16384 inputs = ["text"] @@ -113,7 +113,7 @@ outputs = ["text", "structured"] costs = { input = 1.00, output = 3.00 } # --- OpenAI Safety Model ------------------------------------------------------ -["openai/gpt-oss-safeguard-20b"] +[gpt-oss-safeguard-20b] model_id = "openai/gpt-oss-safeguard-20b" max_tokens = 65536 inputs = ["text"] @@ -121,7 +121,7 @@ outputs = ["text", "structured"] costs = { input = 0.075, output = 0.30 } # --- Qwen 3 ------------------------------------------------------------------- -["qwen/qwen3-32b"] +[qwen3-32b] model_id = "qwen/qwen3-32b" max_tokens = 40960 inputs = ["text"] diff --git 
a/.pipelex/inference/backends/huggingface.toml b/.pipelex/inference/backends/huggingface.toml new file mode 100644 index 0000000..1a79638 --- /dev/null +++ b/.pipelex/inference/backends/huggingface.toml @@ -0,0 +1,43 @@ +################################################################################ +# Hugging Face Backend Configuration +################################################################################ +# +# This file defines the model specifications for Hugging Face models. +# It contains model definitions for various image generation models +# accessible through the Hugging Face Inference API with provider="auto". +# +# Configuration structure: +# - Each model is defined in its own section with the model name as the header +# - Headers with dots or slashes must be quoted (e.g., ["stabilityai/stable-diffusion-2-1"]) +# - Model costs are in USD per million tokens (input/output) +# +# Documentation: https://docs.pipelex.com +# Support: https://go.pipelex.com/discord +# +################################################################################ + +################################################################################ +# MODEL DEFAULTS +################################################################################ + +[defaults] +model_type = "img_gen" +sdk = "huggingface_img_gen" + +################################################################################ +# IMAGE GENERATION MODELS +################################################################################ + +# --- Qwen Image Models -------------------------------------------------- +[qwen-image] +model_id = "Qwen/Qwen-Image" +inputs = ["text"] +outputs = ["image"] +costs = { input = 0.0, output = 0.0 } +variant = "fal-ai" +# variant = "replicate" + +[qwen-image.rules] +prompt = "with_negative" +aspect_ratio = "qwen_image" +inference = "qwen_image" diff --git a/.pipelex/inference/backends/internal.toml b/.pipelex/inference/backends/internal.toml index 5bb9683..e44b222 
100644 --- a/.pipelex/inference/backends/internal.toml +++ b/.pipelex/inference/backends/internal.toml @@ -19,7 +19,7 @@ ################################################################################ # --- PyPDFium2 Text Extractor ------------------------------------------------- -[pypdfium2-extract-text] +[pypdfium2-extract-pdf] model_type = "text_extractor" sdk = "pypdfium2" model_id = "extract-text" @@ -27,3 +27,11 @@ inputs = ["pdf"] outputs = ["pages"] costs = {} +# --- Docling Text Extractor --------------------------------------------------- +[docling-extract-text] +model_type = "text_extractor" +sdk = "docling_sdk" +model_id = "extract-text" +inputs = ["pdf", "image"] +outputs = ["pages"] +costs = {} diff --git a/.pipelex/inference/backends/mistral.toml b/.pipelex/inference/backends/mistral.toml index c4b49e3..e131f8e 100644 --- a/.pipelex/inference/backends/mistral.toml +++ b/.pipelex/inference/backends/mistral.toml @@ -118,14 +118,14 @@ costs = { input = 2.0, output = 6.0 } [mistral-medium] model_id = "mistral-medium-latest" max_tokens = 128000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 0.4, output = 2.0 } [mistral-medium-2508] model_id = "mistral-medium-2508" max_tokens = 128000 -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 0.4, output = 2.0 } @@ -133,11 +133,32 @@ costs = { input = 0.4, output = 2.0 } # EXTRACTION MODELS ################################################################################ -# --- OCR Models --------------------------------------------------------------- +# TODO: add support to pricing per page + +[mistral-ocr-2503] +model_type = "text_extractor" +model_id = "mistral-ocr-2503" +max_tokens = 16384 +inputs = ["pdf", "image"] +outputs = ["pages"] + +[mistral-ocr-2505] +model_type = "text_extractor" +model_id = "mistral-ocr-2505" +max_tokens = 16384 +inputs = ["pdf", "image"] +outputs = 
["pages"] + +[mistral-ocr-2512] +model_type = "text_extractor" +model_id = "mistral-ocr-2512" +max_tokens = 16384 +inputs = ["pdf", "image"] +outputs = ["pages"] + [mistral-ocr] model_type = "text_extractor" model_id = "mistral-ocr-latest" -max_tokens = 131072 +max_tokens = 16384 inputs = ["pdf", "image"] outputs = ["pages"] -costs = { input = 0.4, output = 2.0 } diff --git a/.pipelex/inference/backends/openai.toml b/.pipelex/inference/backends/openai.toml index 4853332..0f0b1dc 100644 --- a/.pipelex/inference/backends/openai.toml +++ b/.pipelex/inference/backends/openai.toml @@ -22,9 +22,9 @@ [defaults] model_type = "llm" -sdk = "openai" +sdk = "openai_responses" prompting_target = "openai" -structure_method = "instructor/openai_tools" +structure_method = "instructor/openai_responses_tools" ################################################################################ # LANGUAGE MODELS @@ -39,123 +39,146 @@ costs = { input = 0.5, output = 1.5 } # --- GPT-4 Series ------------------------------------------------------------- [gpt-4] -model_id = "gpt-4" +inputs = ["text"] +outputs = ["text"] costs = { input = 30.0, output = 60.0 } [gpt-4-turbo] -model_id = "gpt-4-turbo" inputs = ["text"] outputs = ["text", "structured"] costs = { input = 10.0, output = 30.0 } # --- GPT-4o Series ------------------------------------------------------------ [gpt-4o-2024-11-20] -model_id = "gpt-4o-2024-11-20" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 2.5, output = 10.0 } [gpt-4o] -model_id = "gpt-4o" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 2.5, output = 10.0 } [gpt-4o-mini-2024-07-18] -model_id = "gpt-4o-mini-2024-07-18" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 0.15, output = 0.6 } [gpt-4o-mini] -model_id = "gpt-4o-mini" -inputs = ["text", "images"] +inputs = 
["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 0.15, output = 0.6 } # --- GPT-4.1 Series ----------------------------------------------------------- ["gpt-4.1"] -model_id = "gpt-4.1" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 2, output = 8 } ["gpt-4.1-mini"] -model_id = "gpt-4.1-mini" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 0.4, output = 1.6 } ["gpt-4.1-nano"] -model_id = "gpt-4.1-nano" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 0.1, output = 0.4 } # --- o Series ---------------------------------------------------------------- -[o1-mini] -model_id = "o1-mini" -inputs = ["text"] -outputs = ["text"] -costs = { input = 3.0, output = 12.0 } -constraints = ["temperature_must_be_1"] - [o1] -model_id = "o1" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 15.0, output = 60.0 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [o3-mini] -model_id = "o3-mini" inputs = ["text"] outputs = ["text", "structured"] costs = { input = 1.1, output = 4.4 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [o3] -model_id = "o3" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 10.0, output = 40.0 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [o4-mini] -model_id = "o4-mini" inputs = ["text"] outputs = ["text", "structured"] costs = { input = 1.1, output = 4.4 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } # --- GPT-5 Series ------------------------------------------------------------- [gpt-5] -model_id = "gpt-5" -inputs = ["text", "images"] 
+inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 1.25, output = 10.0 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [gpt-5-mini] -model_id = "gpt-5-mini" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 0.25, output = 2.0 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [gpt-5-nano] -model_id = "gpt-5-nano" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text", "structured"] costs = { input = 0.05, output = 0.4 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } [gpt-5-chat] model_id = "gpt-5-chat-latest" -inputs = ["text", "images"] +inputs = ["text", "images", "pdf"] outputs = ["text"] costs = { input = 1.25, output = 10.0 } -constraints = ["temperature_must_be_1"] +valued_constraints = { fixed_temperature = 1 } + +[gpt-5-codex] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } + +# --- GPT-5.1 Series ------------------------------------------------------------- +["gpt-5.1"] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } + +["gpt-5.1-chat"] +model_id = "gpt-5.1-chat-latest" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } + +["gpt-5.1-codex"] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } + +["gpt-5.1-codex-max"] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } + +# --- GPT-5.2 Series 
------------------------------------------------------------- +["gpt-5.2"] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.75, output = 14.0 } + +["gpt-5.2-chat"] +model_id = "gpt-5.2-chat-latest" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.75, output = 14.0 } +valued_constraints = { fixed_temperature = 1 } ################################################################################ # IMAGE GENERATION MODELS @@ -163,8 +186,23 @@ constraints = ["temperature_must_be_1"] # --- OpenAI Image Generation -------------------------------------------------- [gpt-image-1] +sdk = "openai_img_gen" +model_type = "img_gen" +inputs = ["text"] +outputs = ["image"] +costs = { input = 10, output = 40 } + +[gpt-image-1-mini] +sdk = "openai_img_gen" +model_type = "img_gen" +inputs = ["text"] +outputs = ["image"] +costs = { input = 2.5, output = 8 } + +["gpt-image-1.5"] +sdk = "openai_img_gen" model_type = "img_gen" -model_id = "gpt-image-1" +model_id = "gpt-image-1.5" inputs = ["text"] outputs = ["image"] -costs = { input = 0.04, output = 0.0 } +costs = { input = 8, output = 32 } diff --git a/.pipelex/inference/backends/pipelex_gateway.toml b/.pipelex/inference/backends/pipelex_gateway.toml new file mode 100644 index 0000000..bca075b --- /dev/null +++ b/.pipelex/inference/backends/pipelex_gateway.toml @@ -0,0 +1,41 @@ +################################################################################ +# Pipelex Gateway Local Overrides +################################################################################ +# +# TELEMETRY NOTICE: +# +# Using Pipelex Gateway enables identified telemetry tied to your API key +# (hashed for security). This is independent from your telemetry.toml settings. +# +# We collect only technical data (model names, token counts, latency, error rates). +# We do NOT collect prompts, completions, pipe codes, or business data. 
+# +# This allows us to monitor service quality, enforce fair usage, and support you. +# +################################################################################ +# +# WARNING: USE AT YOUR OWN RISK! +# +# The actual model configuration is fetched remotely from Pipelex servers. +# Any override in this file may cause unexpected behavior or failures, +# as the remote configuration may change at any time. +# +# If you must override, you may ONLY use these keys per model: +# - sdk +# - structure_method +# +# All other keys will be ignored. +# +# If you need custom configurations, consider using your own API keys +# with direct provider backends (openai, anthropic, etc.) instead. +# +# Documentation: +# https://docs.pipelex.com/home/7-configuration/config-technical/inference-backend-config/ +# Support: https://go.pipelex.com/discord +# +################################################################################ + +# Per-model overrides example: +# [gpt-4o] +# sdk = "gateway_completions" +# structure_method = "instructor/openai_tools" diff --git a/.pipelex/inference/backends/pipelex_inference.toml b/.pipelex/inference/backends/pipelex_inference.toml index 655105a..751c570 100644 --- a/.pipelex/inference/backends/pipelex_inference.toml +++ b/.pipelex/inference/backends/pipelex_inference.toml @@ -36,54 +36,88 @@ model_id = "pipelex/gpt-4o" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 2.75, output = 11.00 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" [gpt-4o-mini] model_id = "pipelex/gpt-4o-mini" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.17, output = 0.66 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" ["gpt-4.1"] model_id = "pipelex/gpt-4.1" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 2, output = 8 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" 
["gpt-4.1-mini"] model_id = "pipelex/gpt-4.1-mini" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.4, output = 1.6 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" ["gpt-4.1-nano"] model_id = "pipelex/gpt-4.1-nano" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.1, output = 0.4 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" [gpt-5-nano] model_id = "pipelex/gpt-5-nano" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.05, output = 0.40 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" [gpt-5-mini] model_id = "pipelex/gpt-5-mini" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.25, output = 2.00 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" [gpt-5-chat] model_id = "pipelex/gpt-5-chat" inputs = ["text", "images"] outputs = ["text"] costs = { input = 1.25, output = 10.00 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" [gpt-5] model_id = "pipelex/gpt-5" inputs = ["text", "images"] outputs = ["text"] costs = { input = 1.25, output = 10.00 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" + +["gpt-5.1"] +model_id = "pipelex/gpt-5.1" +inputs = ["text", "images"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.00 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" + +["gpt-5.1-chat"] +model_id = "pipelex/gpt-5.1-chat" +inputs = ["text", "images"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.00 } +sdk = "openai_responses" +structure_method = "instructor/openai_responses_tools" # --- Claude LLMs -------------------------------------------------------------- ["claude-4-sonnet"] @@ -110,32 +144,45 @@ inputs = ["text", "images"] outputs = ["text", "structured"] 
costs = { input = 1, output = 5 } +["claude-4.5-opus"] +model_id = "pipelex/claude-4.5-opus" +inputs = ["text", "images"] +outputs = ["text", "structured"] +costs = { input = 5, output = 25 } + # --- Gemini LLMs -------------------------------------------------------------- ["gemini-2.0-flash"] -model_id = "gemini/gemini-2.0-flash" +model_id = "pipelex/gemini-2.0-flash" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.10, output = 0.40 } ["gemini-2.5-pro"] -model_id = "gemini/gemini-2.5-pro" +model_id = "pipelex/gemini-2.5-pro" inputs = ["text", "images"] outputs = ["text", "structured"] max_prompt_images = 3000 costs = { input = 1.25, output = 10.0 } ["gemini-2.5-flash"] -model_id = "gemini/gemini-2.5-flash" +model_id = "pipelex/gemini-2.5-flash" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.30, output = 2.50 } ["gemini-2.5-flash-lite"] -model_id = "gemini/gemini-2.5-flash-lite" +model_id = "pipelex/gemini-2.5-flash-lite" inputs = ["text", "images"] outputs = ["text", "structured"] costs = { input = 0.10, output = 0.40 } +["gemini-3.0-pro"] +model_id = "pipelex/gemini-3.0-pro" +inputs = ["text", "images"] +outputs = ["text", "structured"] +max_prompt_images = 3000 +costs = { input = 2, output = 12.0 } + # --- XAI LLMs -------------------------------------------------------------- [grok-3] diff --git a/.pipelex/inference/backends/portkey.toml b/.pipelex/inference/backends/portkey.toml new file mode 100644 index 0000000..9a7ede7 --- /dev/null +++ b/.pipelex/inference/backends/portkey.toml @@ -0,0 +1,263 @@ +################################################################################ +# Portkey Configuration +################################################################################ +# +# This file defines the model specifications for the Portkey backend. +# It contains model definitions for various AI models. 
+# +# Configuration structure: +# - Each model is defined in its own section with the model name as the header +# - Headers with dots must be quoted (e.g., ["gpt-4.1"]) +# - Model costs are in USD per million tokens (input/output) +# +# Documentation: https://docs.pipelex.com +# Support: https://go.pipelex.com/discord +# +################################################################################ + +################################################################################ +# MODEL DEFAULTS +################################################################################ + +[defaults] +model_type = "llm" +sdk = "portkey_completions" +structure_method = "instructor/openai_tools" +prompting_target = "anthropic" + +################################################################################ +# LANGUAGE MODELS +################################################################################ + +# --- OpenAI LLMs -------------------------------------------------------------- +[gpt-4o-mini] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 0.15, output = 0.6 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +[gpt-4o] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 2.5, output = 10.0 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +["gpt-4.1-nano"] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 0.1, output = 0.4 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +["gpt-4.1-mini"] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 0.4, output = 1.6 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +["gpt-4.1"] +inputs = ["text", 
"images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 2, output = 8 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +[o1] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 15.0, output = 60.0 } +valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +[o3-mini] +inputs = ["text"] +outputs = ["text", "structured"] +costs = { input = 1.1, output = 4.4 } +valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +[o3] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 2, output = 8 } +valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +[o4-mini] +inputs = ["text"] +outputs = ["text", "structured"] +costs = { input = 1.1, output = 4.4 } +valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +[gpt-5-nano] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 0.05, output = 0.4 } +valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +[gpt-5-mini] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 0.25, output = 2.0 } +valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +[gpt-5] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } 
+valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +["gpt-5.1"] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +["gpt-5.1-codex"] +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 1.25, output = 10.0 } +valued_constraints = { fixed_temperature = 1 } +sdk = "portkey_responses" +structure_method = "instructor/openai_responses_tools" +x-portkey-provider = "@openai" + +# --- Claude LLMs -------------------------------------------------------------- +[claude-3-haiku] +model_id = "claude-3-haiku-20240307" +max_tokens = 4096 +inputs = ["text", "images"] +outputs = ["text", "structured"] +max_prompt_images = 100 +costs = { input = 0.25, output = 1.25 } +x-portkey-provider = "@anthropic" + +["claude-3.7-sonnet"] +model_id = "claude-3-7-sonnet-20250219" +max_tokens = 8192 +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 100 +costs = { input = 3.0, output = 15.0 } +x-portkey-provider = "@anthropic" + +[claude-4-sonnet] +model_id = "claude-sonnet-4-20250514" +max_tokens = 64000 +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 100 +costs = { input = 3.0, output = 15.0 } +x-portkey-provider = "@anthropic" + +[claude-4-opus] +model_id = "claude-opus-4-20250514" +max_tokens = 32000 +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 100 +costs = { input = 3.0, output = 15.0 } +x-portkey-provider = "@anthropic" + +["claude-4.1-opus"] +model_id = "claude-opus-4-1-20250805" +max_tokens = 32000 +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 100 
+costs = { input = 3.0, output = 15.0 } +x-portkey-provider = "@anthropic" + +["claude-4.5-sonnet"] +model_id = "claude-sonnet-4-5-20250929" +max_tokens = 64000 +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 100 +costs = { input = 3.0, output = 15.0 } +x-portkey-provider = "@anthropic" + +["claude-4.5-haiku"] +model_id = "claude-haiku-4-5-20251001" +max_tokens = 64000 +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 100 +costs = { input = 1.0, output = 5.0 } +x-portkey-provider = "@anthropic" + +["claude-4.5-opus"] +model_id = "claude-opus-4-5-20251101" +max_tokens = 64000 +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 100 +costs = { input = 5.0, output = 25.0 } +x-portkey-provider = "@anthropic" + +# --- Gemini LLMs -------------------------------------------------------------- +["gemini-2.0-flash"] +model_id = "gemini-2.0-flash" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 0.10, output = 0.40 } +x-portkey-provider = "@google" + +["gemini-2.5-pro"] +model_id = "gemini-2.5-pro" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 3000 +costs = { input = 1.25, output = 10.0 } +x-portkey-provider = "@google" + +["gemini-2.5-flash"] +model_id = "gemini-2.5-flash" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 0.30, output = 2.50 } +x-portkey-provider = "@google" + +["gemini-2.5-flash-lite"] +model_id = "gemini-2.5-flash-lite" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +costs = { input = 0.10, output = 0.40 } +x-portkey-provider = "@google" + +["gemini-3.0-pro"] +model_id = "gemini-3-pro-preview" +inputs = ["text", "images", "pdf"] +outputs = ["text", "structured"] +max_prompt_images = 3000 +costs = { input = 2, output = 12.0 } +x-portkey-provider = "@google" diff --git 
a/.pipelex/inference/backends/scaleway.toml b/.pipelex/inference/backends/scaleway.toml new file mode 100644 index 0000000..20fe792 --- /dev/null +++ b/.pipelex/inference/backends/scaleway.toml @@ -0,0 +1,67 @@ +################################################################################ +# Scaleway Backend Configuration +################################################################################ +# +# This file defines the model specifications for Scaleway models. +# It contains model definitions for various LLM models accessible through +# the Scaleway API, including text-only and vision-capable models. +# +# Configuration structure: +# - Each model is defined in its own section with the model name as the header +# - Headers with dots or slashes must be quoted (e.g., ["meta-llama/llama-4-scout"]) +# - Model costs are in USD per million tokens (input/output) +# - Vision models support max 5 images per request, 33MP max resolution +# +# Documentation: https://docs.pipelex.com +# Support: https://go.pipelex.com/discord +# +################################################################################ + +################################################################################ +# MODEL DEFAULTS +################################################################################ + +[defaults] +model_type = "llm" +sdk = "openai" +structure_method = "instructor/json" + +# --- DeepSeek Models ---------------------------------------------------------- +[deepseek-r1-distill-llama-70b] +max_tokens = 32768 +inputs = ["text"] +outputs = ["text", "structured"] +costs = { input = 0.90, output = 0.90 } + +# --- Meta Llama 3.x Series ---------------------------------------------------- +["llama-3.1-8b-instruct"] +max_tokens = 131072 +inputs = ["text"] +outputs = ["text", "structured"] +costs = { input = 0.2, output = 0.2 } + +["llama-3.3-70b-instruct"] +max_tokens = 32768 +inputs = ["text"] +outputs = ["text", "structured"] +costs = { input = 0.90, output = 0.90 } + +# --- 
OpenAI GPT-OSS Models ---------------------------------------------------- +[gpt-oss-120b] +max_tokens = 65536 +inputs = ["text"] +outputs = ["text", "structured"] +costs = { input = 0.15, output = 0.60 } + +# --- Qwen 3 ------------------------------------------------------------------- +[qwen3-235b-a22b-instruct-2507] +max_tokens = 40960 +inputs = ["text"] +outputs = ["text", "structured"] +costs = { input = 0.75, output = 2.25 } + +[qwen3-coder-30b-a3b-instruct] +max_tokens = 40960 +inputs = ["text"] +outputs = ["text", "structured"] +costs = { input = 0.20, output = 0.80 } diff --git a/.pipelex/inference/deck/base_deck.toml b/.pipelex/inference/deck/base_deck.toml index 954a65d..63ab39e 100644 --- a/.pipelex/inference/deck/base_deck.toml +++ b/.pipelex/inference/deck/base_deck.toml @@ -22,20 +22,20 @@ base-gpt = "gpt-4o" base-gemini = "gemini-2.5-flash" base-mistral = "mistral-medium" base-groq = "llama-3.3-70b-versatile" +base-grok = "grok-4-fast-non-reasoning" -best-gpt = "gpt-4o" -best-claude = "claude-4.1-opus" -best-gemini = "gemini-2.5-pro" +best-gpt = "gpt-5.1" +best-claude = "claude-4.5-opus" +best-gemini = "gemini-3.0-pro" best-mistral = "mistral-medium" -best-grok = "grok-3" # Groq-specific aliases fast-groq = "llama-3.1-8b-instant" -vision-groq = "meta-llama/llama-4-scout-17b-16e-instruct" +vision-groq = "llama-4-scout-17b-16e-instruct" # Image generation aliases base-img-gen = "flux-pro/v1.1" -best-img-gen = "flux-pro/v1.1-ultra" +best-img-gen = "flux-2" fast-img-gen = "fast-lightning-sdxl" #################################################################################################### @@ -43,35 +43,49 @@ fast-img-gen = "fast-lightning-sdxl" #################################################################################################### [waterfalls] + +# --- Waterfalls for LLMs --------------------------------------------------------------------- smart_llm = [ + "claude-4.5-opus", "claude-4.5-sonnet", + "gemini-3.0-pro", + "gpt-5.1", 
"claude-4.1-opus", - "claude-4-sonnet", "gemini-2.5-pro", - "gpt-4o", # we're using gpt-4o here rather than gpt-5 until we'll handle reasoning parameters - "grok-3", + "claude-4-sonnet", + "grok-4-fast-non-reasoning", ] smart_llm_with_vision = [ + "claude-4.5-opus", "claude-4.5-sonnet", + "gemini-3.0-pro", + "gpt-5.1", "claude-4.1-opus", - "claude-4-sonnet", "gemini-2.5-pro", - "gpt-4o", # we use gpt-4o here rather than gpt-5 until we handle reasoning parameters - "grok-3", + "claude-4-sonnet", + "grok-4-fast-non-reasoning", ] smart_llm_for_structured = [ + "claude-4.5-opus", "claude-4.5-sonnet", + "gemini-3.0-pro", + "gpt-5.1", "claude-4.1-opus", "claude-4-sonnet", - "gpt-4o", # we use gpt-4o here rather than gpt-5 until we handle reasoning parameters ] -llm_for_creativity = ["claude-4.1-opus", "gemini-2.5-pro", "gpt-4o"] +llm_for_creativity = [ + "claude-4.5-opus", + "claude-4.1-opus", + "gemini-2.5-pro", + "gpt-5.1", +] llm_for_large_codebase = [ "gemini-2.5-pro", "claude-4.5-sonnet", - "gpt-4o", # we use gpt-4o here rather than gpt-5 until we handle reasoning parameters + "gemini-3.0-pro", + "gpt-5.1", "gemini-2.5-flash", - "grok-3", + "grok-4-fast-non-reasoning", ] cheap_llm = [ "gpt-4o-mini", @@ -81,8 +95,8 @@ cheap_llm = [ "grok-3-mini", ] cheap_llm_for_vision = [ - "gpt-4o-mini", "gemini-2.5-flash-lite", + "gpt-4o-mini", "claude-3-haiku", ] cheap_llm_for_structured = ["gpt-4o-mini", "mistral-small", "claude-3-haiku"] @@ -93,6 +107,14 @@ cheap_llm_for_creativity = [ "claude-4.5-haiku", ] +# --- Waterfalls for Extracts --------------------------------------------------------------------- +pdf_text_extractor = [ + "azure-document-intelligence", + "mistral-ocr", + "pypdfium2-extract-pdf", +] +image_text_extractor = ["mistral-ocr"] + #################################################################################################### # LLM Presets #################################################################################################### @@ -119,7 +141,7 
@@ llm_to_analyze_large_codebase = { model = "base-claude", temperature = 0.1 } # Vision skills llm_for_img_to_text_cheap = { model = "gpt-4o-mini", temperature = 0.1 } llm_for_img_to_text = { model = "base-claude", temperature = 0.1 } -llm_for_diagram_to_text = { model = "base-claude", temperature = 0.3 } +llm_for_diagram_to_text = { model = "best-claude", temperature = 0.3 } llm_for_table_to_text = { model = "base-claude", temperature = 0.3 } # Image generation prompting skills @@ -151,11 +173,11 @@ for_object = "cheap_llm_for_structured" #################################################################################################### [extract] -choice_default = "extract_text_from_visuals" +choice_default = "extract_ocr_from_document" [extract.presets] -extract_text_from_visuals = { model = "mistral-ocr", max_nb_images = 100, image_min_size = 50 } -extract_text_from_pdf = { model = "pypdfium2-extract-text", max_nb_images = 100, image_min_size = 50 } +extract_ocr_from_document = { model = "azure-document-intelligence", max_nb_images = 100, image_min_size = 50 } +extract_basic_from_pdf = { model = "pypdfium2-extract-pdf", max_nb_images = 100, image_min_size = 50 } #################################################################################################### # Image Generation Presets @@ -170,6 +192,7 @@ choice_default = "gen_image_basic" gen_image_basic = { model = "base-img-gen", quality = "medium", guidance_scale = 7.5, is_moderated = true, safety_tolerance = 3 } gen_image_fast = { model = "fast-img-gen", nb_steps = 4, guidance_scale = 5.0, is_moderated = true, safety_tolerance = 3 } gen_image_high_quality = { model = "best-img-gen", quality = "high", guidance_scale = 8.0, is_moderated = true, safety_tolerance = 3 } +gen_image_openai_low_quality = { model = "gpt-image-1", quality = "low" } # Specific skills img_gen_for_art = { model = "best-img-gen", quality = "high", guidance_scale = 9.0, is_moderated = false, safety_tolerance = 5 } diff --git 
a/.pipelex/inference/routing_profiles.toml b/.pipelex/inference/routing_profiles.toml index fa3f1d4..bf40281 100644 --- a/.pipelex/inference/routing_profiles.toml +++ b/.pipelex/inference/routing_profiles.toml @@ -9,47 +9,53 @@ # ========================================================================================= # Which profile to use (change this to switch routing) -active = "pipelex_first" +active = "pipelex_gateway_first" -# We recommend using the "pipelex_first" profile to get a head start with all models. -# The Pipelex Inference backend is currently not recommended for production use, -# but it's great for development and testing. -# To use the Pipelex Inference backend (pipelex_first profile): +# We recommend using the "pipelex_gateway_first" profile to get a head start with all models. +# To use the Pipelex Gateway backend: # 1. Join our Discord community to get your free API key (no credit card required): # Visit https://go.pipelex.com/discord and request your key in the appropriate channel -# 2. Set the environment variable: export PIPELEX_INFERENCE_API_KEY="your-api-key" -# 3. The .pipelex/inference/backends.toml is already configured with api_key = "${PIPELEX_INFERENCE_API_KEY}" -# which will get the key from the environment variable. +# 2. Set the environment variable (or add it to your .env file): +# - Linux/macOS: export PIPELEX_GATEWAY_API_KEY="your-api-key" +# - Windows CMD: set PIPELEX_GATEWAY_API_KEY=your-api-key +# - Windows PowerShell: $env:PIPELEX_GATEWAY_API_KEY="your-api-key" +# 3. The .pipelex/inference/backends.toml is already configured with api_key = "${PIPELEX_GATEWAY_API_KEY}" +# which will get the key from the environment variable. 
# ========================================================================================= # Routing Profiles # ========================================================================================= -[profiles.pipelex_first] -description = "Use Pipelex Inference backend for all its supported models" -default = "pipelex_inference" +[profiles.pipelex_gateway_first] +description = "Use Pipelex Gateway backend for all its supported models" +default = "pipelex_gateway" fallback_order = [ - "pipelex_inference", + "pipelex_gateway", "azure_openai", "bedrock", + "google", "blackboxai", "mistral", "fal", ] -[profiles.pipelex_first.routes] +[profiles.pipelex_gateway_first.routes] # Pattern matching: "model-pattern" = "backend-name" -"gpt-*" = "pipelex_inference" -"claude-*" = "pipelex_inference" -"grok-*" = "pipelex_inference" -"gemini-*" = "pipelex_inference" -[profiles.pipelex_first.optional_routes] # Each optional route is considered only if its backend is available +[profiles.pipelex_gateway_first.optional_routes] # Each optional route is considered only if its backend is available +"gpt-*" = "pipelex_gateway" +"gpt-image-1" = "openai" +"claude-*" = "pipelex_gateway" +"grok-*" = "pipelex_gateway" +"gemini-*" = "pipelex_gateway" "*-sdxl" = "fal" "flux-*" = "fal" -"gpt-image-1" = "openai" "mistral-ocr" = "mistral" +[profiles.all_pipelex_gateway] +description = "Use Pipelex Gateway for all its supported models" +default = "pipelex_gateway" + [profiles.all_anthropic] description = "Use Anthropic backend for all its supported models" default = "anthropic" @@ -78,6 +84,10 @@ default = "google" description = "Use groq backend for all its supported models" default = "groq" +[profiles.all_huggingface] +description = "Use HuggingFace backend for all its supported models" +default = "huggingface" + [profiles.all_mistral] description = "Use Mistral backend for all its supported models" default = "mistral" @@ -90,18 +100,22 @@ default = "ollama" description = "Use OpenAI backend 
for all its supported models" default = "openai" -[profiles.all_pipelex_inference] -description = "Use Pipelex Inference backend for all its supported models" -default = "pipelex_inference" +[profiles.all_portkey] +description = "Use Portkey backend for all its supported models" +default = "portkey" -[profiles.all_vertexai] -description = "Use Vertex AI backend for all its supported models" -default = "vertexai" +[profiles.all_scaleway] +description = "Use Scaleway backend for all its supported models" +default = "scaleway" [profiles.all_xai] description = "Use xAI backend for all its supported models" default = "xai" +[profiles.all_internal] +description = "Use internal backend for all its supported models" +default = "internal" + # ========================================================================================= # Custom Profiles # ========================================================================================= @@ -123,7 +137,7 @@ default = "xai" # ========================================================================================= [profiles.example_routing_using_patterns] description = "Example routing profile using patterns" -default = "pipelex_inference" +default = "pipelex_gateway" [profiles.example_routing_using_patterns.routes] # Pattern matching: "model-pattern" = "backend-name" @@ -143,17 +157,17 @@ default = "pipelex_inference" description = "Example routing profile using specific models" [profiles.example_routing_using_specific_models.routes] -"gpt-5-nano" = "pipelex_inference" +"gpt-5-nano" = "pipelex_gateway" "gpt-4o-mini" = "blackboxai" "gpt-5-mini" = "openai" "gpt-5-chat" = "azure_openai" -"claude-4-sonnet" = "pipelex_inference" +"claude-4-sonnet" = "pipelex_gateway" "claude-3.7-sonnet" = "blackboxai" -"gemini-2.5-flash-lite" = "pipelex_inference" +"gemini-2.5-flash-lite" = "pipelex_gateway" "gemini-2.5-flash" = "blackboxai" "gemini-2.5-pro" = "vertexai" -"grok-3" = "pipelex_inference" +"grok-3" = "pipelex_gateway" 
"grok-3-mini" = "xai" diff --git a/.pipelex/pipelex.toml b/.pipelex/pipelex.toml index b0d36c0..7a7ddaf 100644 --- a/.pipelex/pipelex.toml +++ b/.pipelex/pipelex.toml @@ -2,11 +2,11 @@ # Pipelex Configuration File #################################################################################################### # -# This configuration file is copied to your project's .pipelex/ directory when you run: -# pipelex init config +# This configuration file is copied to client projects' .pipelex/ directory when running: +# `pipelex init config` # # Purpose: -# - This file allows you to override Pipelex's default settings for your specific project +# - This file allows to override Pipelex's default settings for specific projects # - Feel free to modify any settings below to suit your needs # - You can add any configuration sections that exist in the main pipelex.toml # @@ -14,11 +14,11 @@ # - See the full default configuration in: pipelex/pipelex.toml (in the Pipelex package) # - See the configuration structure classes in: pipelex/config.py and pipelex/cogt/config_cogt.py # -# Common Customizations: +# Common customizations are proposed below, such as: # - Logging levels and behavior # - Excluded directories for scanning # - LLM prompt dumping for debugging -# - Feature flags for tracking and reporting +# - Feature flags # - Observer and reporting output directories # # Documentation: https://docs.pipelex.com @@ -26,64 +26,136 @@ # #################################################################################################### +[pipelex.pipeline_execution_config] +# Uncomment to disable conversion of data URLs to pipelex-storage:// URIs +# is_normalize_data_urls_to_storage = false + +[pipelex.pipeline_execution_config.graph_config.data_inclusion] +# Uncomment to include stuff data in graph outputs: +stuff_json_content = true +stuff_text_content = true +stuff_html_content = true +error_stack_traces = true + 
+[pipelex.pipeline_execution_config.graph_config.graphs_inclusion] +# Uncomment to customize which graph outputs are generated (all enabled by default): +# graphspec_json = false +# mermaidflow_mmd = false +# mermaidflow_html = false +# reactflow_viewspec = false +# reactflow_html = false + +[pipelex.pipeline_execution_config.graph_config.reactflow_config] +# Uncomment to customize ReactFlow graph rendering: +# edge_type = "bezier" # Options: "bezier", "smoothstep", "step", "straight" +# nodesep = 50 # Horizontal spacing between nodes +# ranksep = 80 # Vertical spacing between ranks/levels +# initial_zoom = 1.0 # Initial zoom level (1.0 = 100%) +# pan_to_top = true # Pan to show top of graph on load + +[pipelex.storage_config] +# Storage method: "local" (default), "in_memory", "s3", or "gcp" +# method = "local" + +# Whether to fetch remote HTTP URLs and store them locally +# is_fetch_remote_content_enabled = true + +# --- Local Storage Configuration --- +# Uncomment to customize local storage settings: +[pipelex.storage_config.local] +# uri_format = "{primary_id}/{secondary_id}/{hash}.{extension}" +# local_storage_path = ".pipelex/storage" + +# --- AWS S3 Storage Configuration --- +# Uncomment to use S3 storage (requires boto3: `pip install pipelex[s3]`): +[pipelex.storage_config.s3] +# uri_format = "{primary_id}/{secondary_id}/{hash}.{extension}" +# bucket_name = "your-bucket-name" +# region = "us-east-1" +# signed_urls_lifespan_seconds = 3600 # Set to "disabled" for public URLs + +# --- Google Cloud Storage Configuration --- +# Uncomment to use GCP storage (requires google-cloud-storage: `pip install pipelex[gcp-storage]`): +[pipelex.storage_config.gcp] +# uri_format = "{primary_id}/{secondary_id}/{hash}.{extension}" +# bucket_name = "your-bucket-name" +# project_id = "your-project-id" +# signed_urls_lifespan_seconds = 3600 # Set to "disabled" for public URLs + [pipelex.scan_config] -excluded_dirs = [ - "env", - ".env", - ".git", - "__pycache__", - 
".pytest_cache", - ".mypy_cache", - ".ruff_cache", - "results", -] +# Uncomment to customize the excluded directories for scanning +# excluded_dirs = [ +# ".venv", +# "venv", +# "env", +# ".env", +# "virtualenv", +# ".virtualenv", +# ".git", +# "__pycache__", +# ".pytest_cache", +# ".mypy_cache", +# ".ruff_cache", +# "node_modules", +# "results", +# ] [pipelex.builder_config] -# Uncomment to change where the generated pipelines are saved +# Uncomment to change where the generated pipelines are saved: # default_output_dir = "." # default_bundle_file_name = "bundle" # default_directory_base_name = "pipeline" [pipelex.log_config] -default_log_level = "INFO" +# Uncomment to change the default log level: +# default_log_level = "INFO" # Uncomment to log to stderr instead of stdout # console_log_target = "stderr" # console_print_target = "stderr" [pipelex.log_config.package_log_levels] -pipelex = "INFO" +# Uncomment to change the log level for specific packages: +# pipelex = "INFO" + +[pipelex.observer_config] +# Uncomment to change the directory where the observer will save its results: +# observer_dir = "results/observer" + +[pipelex.feature_config] +# WIP/Experimental feature flags: +# is_reporting_enabled = true + +[pipelex.reporting_config] +# Uncomment to customize the reporting configuration: +# is_log_costs_to_console = false +# is_generate_cost_report_file_enabled = false +# cost_report_dir_path = "reports" +# cost_report_base_name = "cost_report" +# cost_report_extension = "csv" +# cost_report_unit_scale = 1.0 [cogt] [cogt.model_deck_config] -# Uncomment to disable model fallback: it will raise errors instead of using secondary model options +# Uncomment to disable model fallback: it will raise errors instead of using secondary model options: # is_model_fallback_enabled = false # Uncomment to change the reaction to missing presets: "raise" (default), "log" or "none" # missing_presets_reaction = "raise" +[cogt.tenacity_config] +# Uncomment to change those values 
as needed: +# max_retries = 50 # Maximum number of retry attempts before giving up +# wait_multiplier = 0.2 # Multiplier applied to the wait time between retries (in seconds) +# wait_max = 20 # Maximum wait time between retries (in seconds) +# wait_exp_base = 1.3 # Base for exponential backoff calculation + [cogt.llm_config] -# Uncomment any of these to enable dumping the inputs or outputs of text-genration with an LLM +# Uncomment any of these to enable dumping the inputs or outputs of text-generation with an LLM: # is_dump_text_prompts_enabled = true # is_dump_response_text_enabled = true [cogt.llm_config.instructor_config] -# Uncomment any of these to enable dumping the kwargs, response or errors of the instructor +# Uncomment any of these to enable dumping the kwargs, response or errors when generating structured content: # is_dump_kwargs_enabled = true # is_dump_response_enabled = true # is_dump_error_enabled = true - -[pipelex.observer_config] -observer_dir = "results/observer" - -[pipelex.feature_config] -# WIP/Experimental feature flags -is_pipeline_tracking_enabled = false -is_reporting_enabled = true - -[pipelex.reporting_config] -is_log_costs_to_console = false -is_generate_cost_report_file_enabled = true -cost_report_dir_path = "reports" -cost_report_base_name = "cost_report" -cost_report_extension = "csv" -cost_report_unit_scale = 1.0 diff --git a/.pipelex/pipelex_service.toml b/.pipelex/pipelex_service.toml new file mode 100644 index 0000000..afe39a2 --- /dev/null +++ b/.pipelex/pipelex_service.toml @@ -0,0 +1,19 @@ +#################################################################################################### +# Pipelex Service Configuration +#################################################################################################### +# +# This file stores settings related to Pipelex managed services. +# Currently used for Pipelex Gateway terms acceptance. 
+# +# Documentation: https://docs.pipelex.com +# Support: https://go.pipelex.com/discord +# +#################################################################################################### + +[agreement] +# Set to true after accepting Pipelex terms of service. +terms_accepted = true + +# Note: when using pipelex_gateway, telemetry is enabled to monitor service usage. +# We collect technical data (model, pipe type...) and quantitative data (token counts...) +# but NOT your content, pipe codes, or output class names. diff --git a/.pipelex/telemetry.toml b/.pipelex/telemetry.toml index 4640a40..eb2c537 100644 --- a/.pipelex/telemetry.toml +++ b/.pipelex/telemetry.toml @@ -1,21 +1,92 @@ #################################################################################################### -# Pipelex Telemetry Configuration +# Custom Telemetry Configuration #################################################################################################### # -# This file controls telemetry settings for Pipelex usage analytics. -# Telemetry helps us improve Pipelex by understanding how it's used. +# This file controls YOUR custom telemetry settings for observability and analytics. +# Configure your own PostHog, Langfuse, or OTLP-compatible backends here. +# +# NOTE: When using Pipelex Gateway, identified telemetry is automatically enabled +# (tied to your Gateway API key, hashed for security). This allows us to monitor +# service quality, enforce fair usage, and provide you with better support. +# Gateway telemetry operates independently from your settings below - you can have both! +# +# To disable all telemetry, set the DO_NOT_TRACK=1 environment variable. 
# # Documentation: https://docs.pipelex.com # Support: https://go.pipelex.com/discord # #################################################################################################### -telemetry_mode = "off" # Values: "off" | "anonymous" | "identified" -host = "https://eu.i.posthog.com" -project_api_key = "phc_HPJnNKpIXh0SxNDYyTAyUtnq9KxNNZJWQszynsWVx4Y" -respect_dnt = true -redact = ["prompt", "system_prompt", "response", "file_path", "url"] -geoip_enabled = true -dry_mode_enabled = false -verbose_enabled = false -user_id = "" +# ────────────────────────────────────────────────────────────────────────────── +# PostHog Configuration (Event tracking + AI span tracing) +# ────────────────────────────────────────────────────────────────────────────── + +[custom_posthog] +mode = "off" # Values: "off" | "anonymous" | "identified" +# user_id = "your_user_id" # Required when mode = "identified" +endpoint = "${POSTHOG_ENDPOINT}" # Default: https://us.i.posthog.com (or https://eu.i.posthog.com for EU) +api_key = "${POSTHOG_API_KEY}" # Get from PostHog Project Settings +geoip = true # Enable GeoIP lookup +debug = false # Enable PostHog debug mode +redact_properties = [ + "prompt", + "system_prompt", + "response", + "file_path", + "url", +] # Event properties to redact + +# AI span tracing to YOUR PostHog (does NOT affect Langfuse/OTLP - they receive full data) +[custom_posthog.tracing] +enabled = false # Send AI spans to your PostHog + +# Privacy controls for data sent to YOUR PostHog only +[custom_posthog.tracing.capture] +content = false # Capture prompt/completion content +# content_max_length = 1000 # Max length for captured content (omit for unlimited) +pipe_codes = false # Include pipe codes in span names/attributes +output_class_names = false # Include output class names in span names/attributes + +# ────────────────────────────────────────────────────────────────────────────── +# Portkey SDK Configuration +# 
────────────────────────────────────────────────────────────────────────────── + +[custom_portkey] +force_debug_enabled = false +force_tracing_enabled = false + +# ────────────────────────────────────────────────────────────────────────────── +# Langfuse Integration +# Note: Langfuse receives FULL span data (no redaction) +# ────────────────────────────────────────────────────────────────────────────── + +[langfuse] +enabled = false +# endpoint = "https://cloud.langfuse.com" # Override for self-hosted Langfuse +# public_key = "${LANGFUSE_PUBLIC_KEY}" # Langfuse public key +# secret_key = "${LANGFUSE_SECRET_KEY}" # Langfuse secret key + +# ────────────────────────────────────────────────────────────────────────────── +# Additional OTLP Exporters (array for multiple) +# Note: OTLP exporters receive FULL span data (no redaction) +# ────────────────────────────────────────────────────────────────────────────── + +# [[otlp]] +# name = "my-collector" # Identifier for logging +# endpoint = "https://..." # OTLP endpoint URL +# headers = { Authorization = "Bearer ${OTLP_AUTH_TOKEN}" } # Headers for OTLP export + +# ────────────────────────────────────────────────────────────────────────────── +# Custom Telemetry Allowed Modes +# Controls which integration modes can use custom telemetry settings above. 
+# ────────────────────────────────────────────────────────────────────────────── + +[telemetry_allowed_modes] +ci = false # CI environments don't use custom telemetry +cli = true # CLI usage allows custom telemetry +docker = true # Docker deployments allow custom telemetry +fastapi = true # FastAPI integrations allow custom telemetry +mcp = true # MCP integrations allow custom telemetry +n8n = true # n8n integrations allow custom telemetry +pytest = false # Tests don't use custom telemetry +python = false # Direct Python SDK usage doesn't use custom telemetry by default diff --git a/CHANGELOG.md b/CHANGELOG.md index cc76214..d07e2be 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,11 @@ # Changelog +## [v0.0.12] - 2026-01-15 + +### Changed + +- Bump `pipelex` to `v0.18.0`, the `Chicago` release: See changelog [here](https://docs.pipelex.com/changelog/) + ## [v0.0.11] - 2025-12-01 ### Added diff --git a/Makefiles/Makefile.local.mk b/Makefiles/Makefile.local.mk index af1c750..1e2643d 100644 --- a/Makefiles/Makefile.local.mk +++ b/Makefiles/Makefile.local.mk @@ -30,5 +30,5 @@ local-run-api: docker run -p 8081:8081 \ -e ENV=$(ENV) \ -e API_KEY="${API_KEY}" \ - -e PIPELEX_INFERENCE_API_KEY="${PIPELEX_INFERENCE_API_KEY}" \ + -e PIPELEX_GATEWAY_API_KEY="${PIPELEX_GATEWAY_API_KEY}" \ $(LOCAL_DOCKER_IMAGE_NAME) diff --git a/README.md b/README.md index 49dd048..81d23a1 100644 --- a/README.md +++ b/README.md @@ -39,41 +39,37 @@ The **Pipelex API Server** is a FastAPI-based REST API that allows you to execut ### 1. Configure Environment -Create a `.env` file with your API key and LLM provider configuration: +Create a `.env` file with your API keys: ```bash -# Required: Your API authentication key. This is the API key that will be required to access the API. 
+# Required: API key to authenticate requests to this API server API_KEY=your-api-key-here -# AI inference provider API key -PIPELEX_INFERENCE_API_KEY=your-pipelex-inference-api-key - -# TEMPORARY: Required for image generation (will be integrated into unified inference system) -FAL_API_KEY=your-fal-api-key +# Required: Pipelex Gateway API key to access all LLMs +PIPELEX_GATEWAY_API_KEY=your-pipelex-gateway-api-key ``` -You can get a free Pipelex Inference API key ($20 of free credits) by joining our [Discord community](https://go.pipelex.com/discord) and requesting it in the appropriate channel. +You can get a free Pipelex Gateway API key ($20 of free credits) here https://app.pipelex.com > **For complete API key configuration**, see the [API Key Configuration section](https://github.com/Pipelex/pipelex#api-key-configuration) in the main Pipelex repository. ### 2. Run with Docker -**Option A: Using Docker Compose (Recommended)** - -See [`docker-compose.yml`](docker-compose.yml) for reference. +**Option A: Using Docker Run (Recommended)** ```bash -docker-compose up +docker run --name pipelex-api -p 8081:8081 \ + -e API_KEY=your-api-key-here \ + -e PIPELEX_GATEWAY_API_KEY=your-pipelex-gateway-api-key \ + pipelex/pipelex-api:latest ``` -**Option B: Using Docker Run** +**Option B: Using Docker Compose** + +See [`docker-compose.yml`](docker-compose.yml) for reference. ```bash -docker run --name pipelex-api -p 8081:8081 \ - -e API_KEY=your-api-key-here \ - -e PIPELEX_INFERENCE_API_KEY=your-pipelex-inference-api-key \ - -e FAL_API_KEY=your-fal-api-key \ - pipelex/pipelex-api:latest +docker-compose up ``` **Option C: Build Locally** @@ -82,8 +78,7 @@ docker run --name pipelex-api -p 8081:8081 \ docker build -t pipelex-api . 
docker run --name pipelex-api -p 8081:8081 \ -e API_KEY=your-api-key-here \ - -e PIPELEX_INFERENCE_API_KEY=your-pipelex-inference-api-key \ - -e FAL_API_KEY=your-fal-api-key \ + -e PIPELEX_GATEWAY_API_KEY=your-pipelex-gateway-api-key \ pipelex-api ``` diff --git a/api/routes/helpers.py b/api/routes/helpers.py index 919f996..73bdff1 100644 --- a/api/routes/helpers.py +++ b/api/routes/helpers.py @@ -70,7 +70,7 @@ def extract_pipe_structures(pipes: list[PipeAbstract]) -> dict[str, dict[str, An inputs_specs[input_name] = get_concept_structure(concept) # Process output - extract concept structure - output_spec = get_concept_structure(pipe.output) + output_spec = get_concept_structure(pipe.output.concept) # Store structure info for this pipe pipe_structures[pipe.code] = { diff --git a/api/routes/pipelex/pipe_builder.py b/api/routes/pipelex/pipe_builder.py index 615e14c..a9e41df 100644 --- a/api/routes/pipelex/pipe_builder.py +++ b/api/routes/pipelex/pipe_builder.py @@ -41,7 +41,7 @@ async def build_pipe(request_data: PipeBuilderRequest): # Execute the pipe_builder pipeline library_manager = get_library_manager() builder_loop = BuilderLoop() - pipelex_bundle_spec = await builder_loop.build_and_fix(inputs={"brief": request_data.brief}, builder_pipe="pipe_builder") + pipelex_bundle_spec, _ = await builder_loop.build_and_fix(inputs={"brief": request_data.brief}, builder_pipe="pipe_builder") blueprint = pipelex_bundle_spec.to_blueprint() library_id, _ = library_manager.open_library() diff --git a/api/security.py b/api/security.py index f285bd8..d679358 100644 --- a/api/security.py +++ b/api/security.py @@ -1,4 +1,4 @@ -"""Authentication - Pluggable JWT or API Key validation.""" +"""Authentication - API Key (default) or JWT validation.""" from typing import Annotated, Any, cast @@ -10,8 +10,7 @@ security = HTTPBearer() -# JWT Configuration -JWT_SECRET = get_optional_env("JWT_SECRET_KEY") +# JWT Configuration (only used when USE_JWT=true) JWT_ALGORITHM = "HS256" @@ -21,6 +20,8 
@@ async def verify_jwt(credentials: Annotated[HTTPAuthorizationCredentials, Depend The JWT token is extracted from the Authorization header (Bearer token). It is verified using the JWT_SECRET_KEY from environment variables. + This method is only used when USE_JWT=true is set. + Args: credentials: The credentials extracted from the Authorization header @@ -30,7 +31,8 @@ async def verify_jwt(credentials: Annotated[HTTPAuthorizationCredentials, Depend Raises: HTTPException: If token is invalid or expired """ - if not JWT_SECRET: + jwt_secret = get_optional_env("JWT_SECRET_KEY") + if not jwt_secret: log.error("JWT_SECRET_KEY environment variable is not set") msg = "Server configuration error: JWT_SECRET_KEY not configured" raise HTTPException( @@ -44,7 +46,7 @@ async def verify_jwt(credentials: Annotated[HTTPAuthorizationCredentials, Depend # Decode and verify the JWT token from the request payload = jwt.decode( token, - JWT_SECRET, + jwt_secret, algorithms=[JWT_ALGORITHM], ) @@ -88,7 +90,10 @@ async def verify_jwt(credentials: Annotated[HTTPAuthorizationCredentials, Depend async def verify_api_key(credentials: Annotated[HTTPAuthorizationCredentials, Depends(security)]) -> str: - """Validate static API key (local/development mode). + """Validate static API key (default authentication method). + + This is the default authentication method. It validates the provided + Bearer token against the API_KEY environment variable. 
Args: credentials: The credentials extracted from the Authorization header @@ -97,7 +102,7 @@ async def verify_api_key(credentials: Annotated[HTTPAuthorizationCredentials, De str: The validated API key Raises: - HTTPException: If API key is invalid + HTTPException: If API key is invalid or not configured """ try: api_key = get_optional_env("API_KEY") @@ -119,7 +124,7 @@ async def verify_api_key(credentials: Annotated[HTTPAuthorizationCredentials, De headers={"WWW-Authenticate": "Bearer"}, ) - log.info("✅ API key validated (local mode)") + log.info("✅ API key validated") return credentials.credentials except HTTPException: @@ -134,12 +139,13 @@ async def verify_api_key(credentials: Annotated[HTTPAuthorizationCredentials, De ) from exc -# Auto-select authentication method based on environment def get_auth_dependency(): - """Select authentication dependency based on USE_JWT environment variable. + """Select authentication method based on environment. + + By default, uses API key authentication. Set USE_JWT=true to enable JWT authentication. Returns: - The appropriate authentication function (verify_jwt or verify_api_key) + The appropriate authentication function (verify_api_key by default, or verify_jwt if enabled) """ use_jwt = get_optional_env("USE_JWT") == "true" return verify_jwt if use_jwt else verify_api_key diff --git a/docker-compose.yml b/docker-compose.yml index 12aa8e6..c98c086 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,5 +5,4 @@ services: - "8081:8081" environment: - API_KEY=${API_KEY} - - PIPELEX_INFERENCE_API_KEY=${PIPELEX_INFERENCE_API_KEY} - - FAL_API_KEY=${FAL_API_KEY} + - PIPELEX_GATEWAY_API_KEY=${PIPELEX_GATEWAY_API_KEY} diff --git a/docs/index.md b/docs/index.md index 856c954..f1bff51 100644 --- a/docs/index.md +++ b/docs/index.md @@ -16,39 +16,35 @@ The Pipelex API is currently available for local deployment only. You can deploy ### 1. 
Configure Environment -Create a `.env` file with your API key and LLM provider configuration: +Create a `.env` file with your API keys: ```bash -# Required: Your API authentication key +# Required: API key to authenticate requests to this API server API_KEY=your-api-key-here -# AI inference provider API key -PIPELEX_INFERENCE_API_KEY=your-pipelex-inference-api-key - -# TEMPORARY: Required for image generation (will be integrated into unified inference system) -FAL_API_KEY=your-fal-api-key +# Required: Pipelex Gateway API key to access all LLMs +PIPELEX_GATEWAY_API_KEY=your-pipelex-gateway-api-key ``` -You can get a free Pipelex Inference API key ($20 of free credits) by joining our [Discord community](https://go.pipelex.com/discord). +You can get a free Pipelex Gateway API key ($20 of free credits) here https://app.pipelex.com > For complete API key configuration, see the [API Key Configuration section](https://github.com/Pipelex/pipelex#api-key-configuration) in the main Pipelex repository. ### 2. Run with Docker -**Option A: Using Docker Compose (Recommended)** +**Option A: Using Docker Run (Recommended)** ```bash -docker-compose up +docker run --name pipelex-api -p 8081:8081 \ + -e API_KEY=your-api-key-here \ + -e PIPELEX_GATEWAY_API_KEY=your-pipelex-gateway-api-key \ + pipelex/pipelex-api:latest ``` -**Option B: Using Docker Run** +**Option B: Using Docker Compose** ```bash -docker run --name pipelex-api -p 8081:8081 \ - -e API_KEY=your-api-key-here \ - -e PIPELEX_INFERENCE_API_KEY=your-pipelex-inference-api-key \ - -e FAL_API_KEY=your-fal-api-key \ - pipelex/pipelex-api:latest +docker-compose up ``` **Option C: Build Locally** @@ -57,8 +53,7 @@ docker run --name pipelex-api -p 8081:8081 \ docker build -t pipelex-api . 
docker run --name pipelex-api -p 8081:8081 \ -e API_KEY=your-api-key-here \ - -e PIPELEX_INFERENCE_API_KEY=your-pipelex-inference-api-key \ - -e FAL_API_KEY=your-fal-api-key \ + -e PIPELEX_GATEWAY_API_KEY=your-pipelex-gateway-api-key \ pipelex-api ``` @@ -78,12 +73,41 @@ http://localhost:8081/api/v1 ## Authentication -Include your API key in the Authorization header: +The API supports two authentication methods: + +### API Key Authentication (Default) + +By default, the API uses static API key authentication. Include your API key in the Authorization header: ``` Authorization: Bearer YOUR_API_KEY ``` +The API key is configured via the `API_KEY` environment variable when deploying the server. + +### JWT Authentication (Optional) + +For production deployments requiring user-specific tokens, JWT authentication is available. + +**To enable JWT authentication:** + +1. Set the `USE_JWT=true` environment variable +2. Set the `JWT_SECRET_KEY` environment variable with your secret key + +```bash +docker run --name pipelex-api -p 8081:8081 \ + -e USE_JWT=true \ + -e JWT_SECRET_KEY=your-jwt-secret-key \ + -e PIPELEX_GATEWAY_API_KEY=your-pipelex-gateway-api-key \ + pipelex/pipelex-api:latest +``` + +**JWT Requirements:** + +- Tokens must be signed with the HS256 algorithm +- Tokens must contain an `email` field in the payload +- Pass the JWT in the Authorization header: `Authorization: Bearer YOUR_JWT_TOKEN` + ## API Endpoints The Pipelex API provides three main capabilities: diff --git a/pyproject.toml b/pyproject.toml index 52d5ca2..87db9b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [project] name = "pipelex-api" -version = "0.0.11" +version = "0.0.12" description = "Pipelex API" authors = [{ name = "Evotis S.A.S.", email = "oss@pipelex.com" }] maintainers = [{ name = "Pipelex staff", email = "oss@pipelex.com" }] @@ -16,12 +16,16 @@ classifiers = [ ] dependencies = [ - "pipelex[mistralai,anthropic,google,google-genai,bedrock,fal]==0.17.3", + 
"pipelex[mistralai,anthropic,google,google-genai,bedrock,fal]", "fastapi>=0.118.0", "pyjwt>=2.10.1", "uvicorn>=0.37.0", ] +[tool.uv.sources] +pipelex = { git = "https://github.com/Pipelex/pipelex.git", branch = "feature/Chicago" } + + [build-system] requires = ["hatchling"] build-backend = "hatchling.build" @@ -35,7 +39,6 @@ Changelog = "https://github.com/Pipelex/pipelex-api/blob/main/CHANGELOG.md" [project.optional-dependencies] dev = [ "boto3-stubs>=1.35.24", - "cocode==0.1.2", "mypy>=1.11.2", "pyright>=1.1.405", "pandas-stubs>=2.2.3.241126", diff --git a/uv.lock b/uv.lock index 931c621..bb60a6b 100644 --- a/uv.lock +++ b/uv.lock @@ -339,6 +339,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/da/a688b78d6d5db9ba7d5243b27d73c45e5325054c536d7cf90b7ceab366c5/botocore_stubs-1.41.5-py3-none-any.whl", hash = "sha256:73661df27cc73242f1478af5eeed07c2e1e688479963402cd62c61a46d93216a", size = 66749, upload-time = "2025-11-26T21:22:57.036Z" }, ] +[[package]] +name = "cached-property" +version = "2.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/76/4b/3d870836119dbe9a5e3c9a61af8cc1a8b69d75aea564572e385882d5aefb/cached_property-2.0.1.tar.gz", hash = "sha256:484d617105e3ee0e4f1f58725e72a8ef9e93deee462222dbd51cd91230897641", size = 10574, upload-time = "2024-10-25T15:43:55.667Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/11/0e/7d8225aab3bc1a0f5811f8e1b557aa034ac04bdf641925b30d3caf586b28/cached_property-2.0.1-py3-none-any.whl", hash = "sha256:f617d70ab1100b7bcf6e42228f9ddcb78c676ffa167278d9f730d1c2fba69ccb", size = 7428, upload-time = "2024-10-25T15:43:54.711Z" }, +] + [[package]] name = "cachetools" version = "6.2.2" @@ -357,76 +366,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/70/7d/9bc192684cea499815ff478dfcdc13835ddf401365057044fb721ec6bddb/certifi-2025.11.12-py3-none-any.whl", hash = 
"sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b", size = 159438, upload-time = "2025-11-12T02:54:49.735Z" }, ] -[[package]] -name = "cffi" -version = "2.0.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pycparser", marker = "implementation_name != 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/12/4a/3dfd5f7850cbf0d06dc84ba9aa00db766b52ca38d8b86e3a38314d52498c/cffi-2.0.0-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:b4c854ef3adc177950a8dfc81a86f5115d2abd545751a304c5bcf2c2c7283cfe", size = 184344, upload-time = "2025-09-08T23:22:26.456Z" }, - { url = "https://files.pythonhosted.org/packages/4f/8b/f0e4c441227ba756aafbe78f117485b25bb26b1c059d01f137fa6d14896b/cffi-2.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2de9a304e27f7596cd03d16f1b7c72219bd944e99cc52b84d0145aefb07cbd3c", size = 180560, upload-time = "2025-09-08T23:22:28.197Z" }, - { url = "https://files.pythonhosted.org/packages/b1/b7/1200d354378ef52ec227395d95c2576330fd22a869f7a70e88e1447eb234/cffi-2.0.0-cp311-cp311-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:baf5215e0ab74c16e2dd324e8ec067ef59e41125d3eade2b863d294fd5035c92", size = 209613, upload-time = "2025-09-08T23:22:29.475Z" }, - { url = "https://files.pythonhosted.org/packages/b8/56/6033f5e86e8cc9bb629f0077ba71679508bdf54a9a5e112a3c0b91870332/cffi-2.0.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:730cacb21e1bdff3ce90babf007d0a0917cc3e6492f336c2f0134101e0944f93", size = 216476, upload-time = "2025-09-08T23:22:31.063Z" }, - { url = 
"https://files.pythonhosted.org/packages/dc/7f/55fecd70f7ece178db2f26128ec41430d8720f2d12ca97bf8f0a628207d5/cffi-2.0.0-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:6824f87845e3396029f3820c206e459ccc91760e8fa24422f8b0c3d1731cbec5", size = 203374, upload-time = "2025-09-08T23:22:32.507Z" }, - { url = "https://files.pythonhosted.org/packages/84/ef/a7b77c8bdc0f77adc3b46888f1ad54be8f3b7821697a7b89126e829e676a/cffi-2.0.0-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:9de40a7b0323d889cf8d23d1ef214f565ab154443c42737dfe52ff82cf857664", size = 202597, upload-time = "2025-09-08T23:22:34.132Z" }, - { url = "https://files.pythonhosted.org/packages/d7/91/500d892b2bf36529a75b77958edfcd5ad8e2ce4064ce2ecfeab2125d72d1/cffi-2.0.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:8941aaadaf67246224cee8c3803777eed332a19d909b47e29c9842ef1e79ac26", size = 215574, upload-time = "2025-09-08T23:22:35.443Z" }, - { url = "https://files.pythonhosted.org/packages/44/64/58f6255b62b101093d5df22dcb752596066c7e89dd725e0afaed242a61be/cffi-2.0.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a05d0c237b3349096d3981b727493e22147f934b20f6f125a3eba8f994bec4a9", size = 218971, upload-time = "2025-09-08T23:22:36.805Z" }, - { url = "https://files.pythonhosted.org/packages/ab/49/fa72cebe2fd8a55fbe14956f9970fe8eb1ac59e5df042f603ef7c8ba0adc/cffi-2.0.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:94698a9c5f91f9d138526b48fe26a199609544591f859c870d477351dc7b2414", size = 211972, upload-time = "2025-09-08T23:22:38.436Z" }, - { url = "https://files.pythonhosted.org/packages/0b/28/dd0967a76aab36731b6ebfe64dec4e981aff7e0608f60c2d46b46982607d/cffi-2.0.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5fed36fccc0612a53f1d4d9a816b50a36702c28a2aa880cb8a122b3466638743", size = 217078, upload-time = "2025-09-08T23:22:39.776Z" }, - { url = 
"https://files.pythonhosted.org/packages/2b/c0/015b25184413d7ab0a410775fdb4a50fca20f5589b5dab1dbbfa3baad8ce/cffi-2.0.0-cp311-cp311-win32.whl", hash = "sha256:c649e3a33450ec82378822b3dad03cc228b8f5963c0c12fc3b1e0ab940f768a5", size = 172076, upload-time = "2025-09-08T23:22:40.95Z" }, - { url = "https://files.pythonhosted.org/packages/ae/8f/dc5531155e7070361eb1b7e4c1a9d896d0cb21c49f807a6c03fd63fc877e/cffi-2.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:66f011380d0e49ed280c789fbd08ff0d40968ee7b665575489afa95c98196ab5", size = 182820, upload-time = "2025-09-08T23:22:42.463Z" }, - { url = "https://files.pythonhosted.org/packages/95/5c/1b493356429f9aecfd56bc171285a4c4ac8697f76e9bbbbb105e537853a1/cffi-2.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:c6638687455baf640e37344fe26d37c404db8b80d037c3d29f58fe8d1c3b194d", size = 177635, upload-time = "2025-09-08T23:22:43.623Z" }, - { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, - { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, - { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, - { url = 
"https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, - { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, - { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, - { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, - { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, - { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, - { url = 
"https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, - { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = "2025-09-08T23:22:58.351Z" }, - { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, - { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, - { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, - { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, - { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, - { url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, - { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, - { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, - { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, - { url = 
"https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, - { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, - { url = "https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, - { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, - { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, - { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, - { url = 
"https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, - { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, - { url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, - { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, - { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, - { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, - { url = 
"https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, - { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, - { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, - { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, - { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, - { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, - { url = 
"https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, - { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, - { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, - { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, - { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, - { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, - { url = 
"https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, -] - [[package]] name = "cfgv" version = "3.5.0" @@ -521,19 +460,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, ] -[[package]] -name = "cocode" -version = "0.1.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pipelex", extra = ["anthropic", "bedrock", "google"] }, - { name = "pygithub" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/08/a8/890526fa2f3cab311cb7a6d56fedd56c6829254fe13ca58435883f14c21f/cocode-0.1.2.tar.gz", hash = "sha256:63b7f9c51bd18c55f485051d714577a5b755498a20e6a9c69fa9ed734fb47801", size = 43905, upload-time = "2025-09-03T09:14:33.233Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/56/e37fd29be59dca658a22aef6328f475850ff4b8cc988e60e04ccdc9fb2b4/cocode-0.1.2-py3-none-any.whl", hash = "sha256:32aba620170da4e1855394fdf37274bf443a98fba98f0d5da1d89a06f06aa8c6", size = 60516, upload-time = "2025-09-03T09:14:32.072Z" }, -] - [[package]] name = "colorama" version = "0.4.6" @@ -635,80 +561,6 @@ toml = [ { name = "tomli", marker = "python_full_version <= '3.11'" }, ] -[[package]] -name = "cryptography" -version = "46.0.3" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = 
"sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/42/9c391dd801d6cf0d561b5890549d4b27bafcc53b39c31a817e69d87c625b/cryptography-46.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:109d4ddfadf17e8e7779c39f9b18111a09efb969a301a31e987416a0191ed93a", size = 7225004, upload-time = "2025-10-15T23:16:52.239Z" }, - { url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" }, - { url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" }, - { url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" }, - { url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = 
"sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" }, - { url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" }, - { url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" }, - { url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" }, - { url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" }, - { url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" }, - { url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 
4698381, upload-time = "2025-10-15T23:17:12.829Z" }, - { url = "https://files.pythonhosted.org/packages/96/92/8a6a9525893325fc057a01f654d7efc2c64b9de90413adcf605a85744ff4/cryptography-46.0.3-cp311-abi3-win32.whl", hash = "sha256:f260d0d41e9b4da1ed1e0f1ce571f97fe370b152ab18778e9e8f67d6af432018", size = 3055988, upload-time = "2025-10-15T23:17:14.65Z" }, - { url = "https://files.pythonhosted.org/packages/7e/bf/80fbf45253ea585a1e492a6a17efcb93467701fa79e71550a430c5e60df0/cryptography-46.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:a9a3008438615669153eb86b26b61e09993921ebdd75385ddd748702c5adfddb", size = 3514451, upload-time = "2025-10-15T23:17:16.142Z" }, - { url = "https://files.pythonhosted.org/packages/2e/af/9b302da4c87b0beb9db4e756386a7c6c5b8003cd0e742277888d352ae91d/cryptography-46.0.3-cp311-abi3-win_arm64.whl", hash = "sha256:5d7f93296ee28f68447397bf5198428c9aeeab45705a55d53a6343455dcb2c3c", size = 2928007, upload-time = "2025-10-15T23:17:18.04Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e2/a510aa736755bffa9d2f75029c229111a1d02f8ecd5de03078f4c18d91a3/cryptography-46.0.3-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:00a5e7e87938e5ff9ff5447ab086a5706a957137e6e433841e9d24f38a065217", size = 7158012, upload-time = "2025-10-15T23:17:19.982Z" }, - { url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" }, - { url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" }, - { url = 
"https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" }, - { url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" }, - { url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" }, - { url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" }, - { url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" }, - { url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" }, - { url = 
"https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" }, - { url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" }, - { url = "https://files.pythonhosted.org/packages/fd/30/27654c1dbaf7e4a3531fa1fc77986d04aefa4d6d78259a62c9dc13d7ad36/cryptography-46.0.3-cp314-cp314t-win32.whl", hash = "sha256:8a6e050cb6164d3f830453754094c086ff2d0b2f3a897a1d9820f6139a1f0914", size = 3022339, upload-time = "2025-10-15T23:17:40.888Z" }, - { url = "https://files.pythonhosted.org/packages/f6/30/640f34ccd4d2a1bc88367b54b926b781b5a018d65f404d409aba76a84b1c/cryptography-46.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:760f83faa07f8b64e9c33fc963d790a2edb24efb479e3520c14a45741cd9b2db", size = 3494315, upload-time = "2025-10-15T23:17:42.769Z" }, - { url = "https://files.pythonhosted.org/packages/ba/8b/88cc7e3bd0a8e7b861f26981f7b820e1f46aa9d26cc482d0feba0ecb4919/cryptography-46.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:516ea134e703e9fe26bcd1277a4b59ad30586ea90c365a87781d7887a646fe21", size = 2919331, upload-time = "2025-10-15T23:17:44.468Z" }, - { url = 
"https://files.pythonhosted.org/packages/fd/23/45fe7f376a7df8daf6da3556603b36f53475a99ce4faacb6ba2cf3d82021/cryptography-46.0.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:cb3d760a6117f621261d662bccc8ef5bc32ca673e037c83fbe565324f5c46936", size = 7218248, upload-time = "2025-10-15T23:17:46.294Z" }, - { url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" }, - { url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" }, - { url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" }, - { url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" }, - { url = 
"https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" }, - { url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" }, - { url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" }, - { url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" }, - { url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" }, - { url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" }, - { url = 
"https://files.pythonhosted.org/packages/0a/6e/1c8331ddf91ca4730ab3086a0f1be19c65510a33b5a441cb334e7a2d2560/cryptography-46.0.3-cp38-abi3-win32.whl", hash = "sha256:6276eb85ef938dc035d59b87c8a7dc559a232f954962520137529d77b18ff1df", size = 3036695, upload-time = "2025-10-15T23:18:08.672Z" }, - { url = "https://files.pythonhosted.org/packages/90/45/b0d691df20633eff80955a0fc7695ff9051ffce8b69741444bd9ed7bd0db/cryptography-46.0.3-cp38-abi3-win_amd64.whl", hash = "sha256:416260257577718c05135c55958b674000baef9a1c7d9e8f306ec60d71db850f", size = 3501720, upload-time = "2025-10-15T23:18:10.632Z" }, - { url = "https://files.pythonhosted.org/packages/e8/cb/2da4cc83f5edb9c3257d09e1e7ab7b23f049c7962cae8d842bbef0a9cec9/cryptography-46.0.3-cp38-abi3-win_arm64.whl", hash = "sha256:d89c3468de4cdc4f08a57e214384d0471911a3830fcdaf7a8cc587e42a866372", size = 2918740, upload-time = "2025-10-15T23:18:12.277Z" }, - { url = "https://files.pythonhosted.org/packages/06/8a/e60e46adab4362a682cf142c7dcb5bf79b782ab2199b0dcb81f55970807f/cryptography-46.0.3-pp311-pypy311_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ce938a99998ed3c8aa7e7272dca1a610401ede816d36d0693907d863b10d9ea", size = 3698132, upload-time = "2025-10-15T23:18:17.056Z" }, - { url = "https://files.pythonhosted.org/packages/da/38/f59940ec4ee91e93d3311f7532671a5cef5570eb04a144bf203b58552d11/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:191bb60a7be5e6f54e30ba16fdfae78ad3a342a0599eb4193ba88e3f3d6e185b", size = 4243992, upload-time = "2025-10-15T23:18:18.695Z" }, - { url = "https://files.pythonhosted.org/packages/b0/0c/35b3d92ddebfdfda76bb485738306545817253d0a3ded0bfe80ef8e67aa5/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c70cc23f12726be8f8bc72e41d5065d77e4515efae3690326764ea1b07845cfb", size = 4409944, upload-time = "2025-10-15T23:18:20.597Z" }, - { url = 
"https://files.pythonhosted.org/packages/99/55/181022996c4063fc0e7666a47049a1ca705abb9c8a13830f074edb347495/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:9394673a9f4de09e28b5356e7fff97d778f8abad85c9d5ac4a4b7e25a0de7717", size = 4242957, upload-time = "2025-10-15T23:18:22.18Z" }, - { url = "https://files.pythonhosted.org/packages/ba/af/72cd6ef29f9c5f731251acadaeb821559fe25f10852f44a63374c9ca08c1/cryptography-46.0.3-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94cd0549accc38d1494e1f8de71eca837d0509d0d44bf11d158524b0e12cebf9", size = 4409447, upload-time = "2025-10-15T23:18:24.209Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c3/e90f4a4feae6410f914f8ebac129b9ae7a8c92eb60a638012dde42030a9d/cryptography-46.0.3-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6b5063083824e5509fdba180721d55909ffacccc8adbec85268b48439423d78c", size = 3438528, upload-time = "2025-10-15T23:18:26.227Z" }, -] - -[[package]] -name = "deprecated" -version = "1.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wrapt" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/49/85/12f0a49a7c4ffb70572b6c2ef13c90c88fd190debda93b23f026b25f9634/deprecated-1.3.1.tar.gz", hash = "sha256:b1b50e0ff0c1fddaa5708a2c6b0a6588bb09b892825ab2b214ac9ea9d92a5223", size = 2932523, upload-time = "2025-10-30T08:19:02.757Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/84/d0/205d54408c08b13550c733c4b85429e7ead111c7f0014309637425520a9a/deprecated-1.3.1-py2.py3-none-any.whl", hash = "sha256:597bfef186b6f60181535a29fbe44865ce137a5079f295b479886c82729d5f3f", size = 11298, upload-time = "2025-10-30T08:19:00.758Z" }, -] - [[package]] name = "dill" version = "0.4.0" @@ -993,6 +845,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/ec/66/03f663e7bca7abe9ccfebe6cb3fe7da9a118fd723a5abb278d6117e7990e/google_genai-1.52.0-py3-none-any.whl", hash = 
"sha256:c8352b9f065ae14b9322b949c7debab8562982f03bf71d44130cd2b798c20743", size = 261219, upload-time = "2025-11-21T02:18:54.515Z" }, ] +[[package]] +name = "googleapis-common-protos" +version = "1.72.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e5/7b/adfd75544c415c487b33061fe7ae526165241c1ea133f9a9125a56b39fd8/googleapis_common_protos-1.72.0.tar.gz", hash = "sha256:e55a601c1b32b52d7a3e65f43563e2aa61bcd737998ee672ac9b951cd49319f5", size = 147433, upload-time = "2025-11-06T18:29:24.087Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c4/ab/09169d5a4612a5f92490806649ac8d41e3ec9129c636754575b3553f4ea4/googleapis_common_protos-1.72.0-py3-none-any.whl", hash = "sha256:4299c5a82d5ae1a9702ada957347726b167f9f8d1fc352477702a1e851ff4038", size = 297515, upload-time = "2025-11-06T18:29:13.14Z" }, +] + [[package]] name = "h11" version = "0.16.0" @@ -1057,6 +921,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] +[[package]] +name = "importlib-metadata" +version = "8.7.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = 
"sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, +] + [[package]] name = "iniconfig" version = "2.3.0" @@ -1769,6 +1645,88 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/55/4f/dbc0c124c40cb390508a82770fb9f6e3ed162560181a85089191a851c59a/openai-2.8.1-py3-none-any.whl", hash = "sha256:c6c3b5a04994734386e8dad3c00a393f56d3b68a27cd2e8acae91a59e4122463", size = 1022688, upload-time = "2025-11-17T22:39:57.675Z" }, ] +[[package]] +name = "opentelemetry-api" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "importlib-metadata" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/b9/3161be15bb8e3ad01be8be5a968a9237c3027c5be504362ff800fca3e442/opentelemetry_api-1.39.1.tar.gz", hash = "sha256:fbde8c80e1b937a2c61f20347e91c0c18a1940cecf012d62e65a7caf08967c9c", size = 65767, upload-time = "2025-12-11T13:32:39.182Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cf/df/d3f1ddf4bb4cb50ed9b1139cc7b1c54c34a1e7ce8fd1b9a37c0d1551a6bd/opentelemetry_api-1.39.1-py3-none-any.whl", hash = "sha256:2edd8463432a7f8443edce90972169b195e7d6a05500cd29e6d13898187c9950", size = 66356, upload-time = "2025-12-11T13:32:17.304Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-common" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-proto" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e9/9d/22d241b66f7bbde88a3bfa6847a351d2c46b84de23e71222c6aae25c7050/opentelemetry_exporter_otlp_proto_common-1.39.1.tar.gz", hash = "sha256:763370d4737a59741c89a67b50f9e39271639ee4afc999dadfe768541c027464", size = 20409, upload-time = "2025-12-11T13:32:40.885Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/8c/02/ffc3e143d89a27ac21fd557365b98bd0653b98de8a101151d5805b5d4c33/opentelemetry_exporter_otlp_proto_common-1.39.1-py3-none-any.whl", hash = "sha256:08f8a5862d64cc3435105686d0216c1365dc5701f86844a8cd56597d0c764fde", size = 18366, upload-time = "2025-12-11T13:32:20.2Z" }, +] + +[[package]] +name = "opentelemetry-exporter-otlp-proto-http" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "googleapis-common-protos" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-common" }, + { name = "opentelemetry-proto" }, + { name = "opentelemetry-sdk" }, + { name = "requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/80/04/2a08fa9c0214ae38880df01e8bfae12b067ec0793446578575e5080d6545/opentelemetry_exporter_otlp_proto_http-1.39.1.tar.gz", hash = "sha256:31bdab9745c709ce90a49a0624c2bd445d31a28ba34275951a6a362d16a0b9cb", size = 17288, upload-time = "2025-12-11T13:32:42.029Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/95/f1/b27d3e2e003cd9a3592c43d099d2ed8d0a947c15281bf8463a256db0b46c/opentelemetry_exporter_otlp_proto_http-1.39.1-py3-none-any.whl", hash = "sha256:d9f5207183dd752a412c4cd564ca8875ececba13be6e9c6c370ffb752fd59985", size = 19641, upload-time = "2025-12-11T13:32:22.248Z" }, +] + +[[package]] +name = "opentelemetry-proto" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "protobuf" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/1d/f25d76d8260c156c40c97c9ed4511ec0f9ce353f8108ca6e7561f82a06b2/opentelemetry_proto-1.39.1.tar.gz", hash = "sha256:6c8e05144fc0d3ed4d22c2289c6b126e03bcd0e6a7da0f16cedd2e1c2772e2c8", size = 46152, upload-time = "2025-12-11T13:32:48.681Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/51/95/b40c96a7b5203005a0b03d8ce8cd212ff23f1793d5ba289c87a097571b18/opentelemetry_proto-1.39.1-py3-none-any.whl", hash = "sha256:22cdc78efd3b3765d09e68bfbd010d4fc254c9818afd0b6b423387d9dee46007", size = 72535, upload-time = "2025-12-11T13:32:33.866Z" }, +] + +[[package]] +name = "opentelemetry-sdk" +version = "1.39.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "opentelemetry-semantic-conventions" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/fb/c76080c9ba07e1e8235d24cdcc4d125ef7aa3edf23eb4e497c2e50889adc/opentelemetry_sdk-1.39.1.tar.gz", hash = "sha256:cf4d4563caf7bff906c9f7967e2be22d0d6b349b908be0d90fb21c8e9c995cc6", size = 171460, upload-time = "2025-12-11T13:32:49.369Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/98/e91cf858f203d86f4eccdf763dcf01cf03f1dae80c3750f7e635bfa206b6/opentelemetry_sdk-1.39.1-py3-none-any.whl", hash = "sha256:4d5482c478513ecb0a5d938dcc61394e647066e0cc2676bee9f3af3f3f45f01c", size = 132565, upload-time = "2025-12-11T13:32:35.069Z" }, +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.60b1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "opentelemetry-api" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/91/df/553f93ed38bf22f4b999d9be9c185adb558982214f33eae539d3b5cd0858/opentelemetry_semantic_conventions-0.60b1.tar.gz", hash = "sha256:87c228b5a0669b748c76d76df6c364c369c28f1c465e50f661e39737e84bc953", size = 137935, upload-time = "2025-12-11T13:32:50.487Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/5e/5958555e09635d09b75de3c4f8b9cae7335ca545d77392ffe7331534c402/opentelemetry_semantic_conventions-0.60b1-py3-none-any.whl", hash = "sha256:9fa8c8b0c110da289809292b0591220d3a7b53c1526a23021e977d68597893fb", size = 219982, 
upload-time = "2025-12-11T13:32:36.955Z" }, +] + [[package]] name = "packaging" version = "25.0" @@ -1899,7 +1857,7 @@ wheels = [ [[package]] name = "pipelex" version = "0.17.3" -source = { registry = "https://pypi.org/simple" } +source = { git = "https://github.com/Pipelex/pipelex.git?branch=feature%2FChicago#6c6301aeec180ffcf9ae2cc3cfa1af354004122d" } dependencies = [ { name = "aiofiles" }, { name = "filetype" }, @@ -1911,8 +1869,13 @@ dependencies = [ { name = "markdown" }, { name = "networkx" }, { name = "openai" }, + { name = "opentelemetry-api" }, + { name = "opentelemetry-exporter-otlp-proto-http" }, + { name = "opentelemetry-sdk" }, + { name = "opentelemetry-semantic-conventions" }, { name = "pillow" }, { name = "polyfactory" }, + { name = "portkey-ai" }, { name = "posthog" }, { name = "pydantic" }, { name = "pypdfium2" }, @@ -1924,11 +1887,6 @@ dependencies = [ { name = "tomlkit" }, { name = "typer" }, { name = "typing-extensions" }, - { name = "yattag" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/33/69/98c518fc203b3096bf163124d3eeae8b15f8608c28e4292b977bef99bbc1/pipelex-0.17.3.tar.gz", hash = "sha256:9be425de4faee01d1039f8e97c55a2beb66e91b005be13323709f5843b917d64", size = 368295, upload-time = "2025-12-01T13:45:52.849Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4c/82/1ad4436608bd1c33077d9d0905a0d93640b9c47b310f1c19ae0f96c18224/pipelex-0.17.3-py3-none-any.whl", hash = "sha256:5dda80b9adcfd13433992e2dfd96f484c6339e14511388cb66adf5e22c28cafa", size = 567412, upload-time = "2025-12-01T13:45:51.07Z" }, ] [package.optional-dependencies] @@ -1955,7 +1913,7 @@ mistralai = [ [[package]] name = "pipelex-api" -version = "0.0.11" +version = "0.0.12" source = { editable = "." 
} dependencies = [ { name = "fastapi" }, @@ -1967,7 +1925,6 @@ dependencies = [ [package.optional-dependencies] dev = [ { name = "boto3-stubs" }, - { name = "cocode" }, { name = "mypy" }, { name = "pandas-stubs" }, { name = "pylint" }, @@ -1999,7 +1956,6 @@ docs = [ [package.metadata] requires-dist = [ { name = "boto3-stubs", marker = "extra == 'dev'", specifier = ">=1.35.24" }, - { name = "cocode", marker = "extra == 'dev'", specifier = "==0.1.2" }, { name = "fastapi", specifier = ">=0.118.0" }, { name = "mkdocs", marker = "extra == 'docs'", specifier = "==1.6.1" }, { name = "mkdocs-glightbox", marker = "extra == 'docs'", specifier = "==0.4.0" }, @@ -2007,7 +1963,7 @@ requires-dist = [ { name = "mkdocs-meta-manager", marker = "extra == 'docs'", specifier = "==1.1.0" }, { name = "mypy", marker = "extra == 'dev'", specifier = ">=1.11.2" }, { name = "pandas-stubs", marker = "extra == 'dev'", specifier = ">=2.2.3.241126" }, - { name = "pipelex", extras = ["mistralai", "anthropic", "google", "google-genai", "bedrock", "fal"], specifier = "==0.17.3" }, + { name = "pipelex", extras = ["mistralai", "anthropic", "google", "google-genai", "bedrock", "fal"], git = "https://github.com/Pipelex/pipelex.git?branch=feature%2FChicago" }, { name = "pyjwt", specifier = ">=2.10.1" }, { name = "pylint", marker = "extra == 'dev'", specifier = ">=3.3.8" }, { name = "pyright", marker = "extra == 'dev'", specifier = ">=1.1.405" }, @@ -2062,6 +2018,27 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/94/7c/535646d75a1c510065169ea65693613c7a6bc64491bea13e7dad4f028ff3/polyfactory-3.1.0-py3-none-any.whl", hash = "sha256:78171232342c25906d542513c9f00ebf41eadec2c67b498490a577024dd7e867", size = 61836, upload-time = "2025-11-25T08:10:14.893Z" }, ] +[[package]] +name = "portkey-ai" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, + { name = "cached-property" }, + { name = "distro" }, + { name = "httpx" }, + { name = "jiter" 
}, + { name = "pydantic" }, + { name = "sniffio" }, + { name = "tqdm" }, + { name = "types-requests" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d4/8a/f5bbaab806ad61d9959cb7c88c639200feacac1b2ba7b455b97a2f216e7c/portkey_ai-2.1.0.tar.gz", hash = "sha256:c2558041c568eef8528737978089301cb9be056f166a683251831cbfa6a623cb", size = 567417, upload-time = "2025-11-25T20:32:43.102Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/11/c585b90ac842027e5f4f7f7cee72d3197f58ff24b6d7c5f1243aa8fa96be/portkey_ai-2.1.0-py3-none-any.whl", hash = "sha256:2166033f8e198745947fee5321d0bbcfb005afc35468bd5a948fa83dc16b6767", size = 1181622, upload-time = "2025-11-25T20:32:41.185Z" }, +] + [[package]] name = "posthog" version = "7.0.1" @@ -2194,6 +2171,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, ] +[[package]] +name = "protobuf" +version = "6.33.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/b8/cda15d9d46d03d4aa3a67cb6bffe05173440ccf86a9541afaf7ac59a1b6b/protobuf-6.33.4.tar.gz", hash = "sha256:dc2e61bca3b10470c1912d166fe0af67bfc20eb55971dcef8dfa48ce14f0ed91", size = 444346, upload-time = "2026-01-12T18:33:40.109Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/be/24ef9f3095bacdf95b458543334d0c4908ccdaee5130420bf064492c325f/protobuf-6.33.4-cp310-abi3-win32.whl", hash = "sha256:918966612c8232fc6c24c78e1cd89784307f5814ad7506c308ee3cf86662850d", size = 425612, upload-time = "2026-01-12T18:33:29.656Z" }, + { url = "https://files.pythonhosted.org/packages/31/ad/e5693e1974a28869e7cd244302911955c1cebc0161eb32dfa2b25b6e96f0/protobuf-6.33.4-cp310-abi3-win_amd64.whl", hash = 
"sha256:8f11ffae31ec67fc2554c2ef891dcb561dae9a2a3ed941f9e134c2db06657dbc", size = 436962, upload-time = "2026-01-12T18:33:31.345Z" }, + { url = "https://files.pythonhosted.org/packages/66/15/6ee23553b6bfd82670207ead921f4d8ef14c107e5e11443b04caeb5ab5ec/protobuf-6.33.4-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:2fe67f6c014c84f655ee06f6f66213f9254b3a8b6bda6cda0ccd4232c73c06f0", size = 427612, upload-time = "2026-01-12T18:33:32.646Z" }, + { url = "https://files.pythonhosted.org/packages/2b/48/d301907ce6d0db75f959ca74f44b475a9caa8fcba102d098d3c3dd0f2d3f/protobuf-6.33.4-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:757c978f82e74d75cba88eddec479df9b99a42b31193313b75e492c06a51764e", size = 324484, upload-time = "2026-01-12T18:33:33.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/1c/e53078d3f7fe710572ab2dcffd993e1e3b438ae71cfc031b71bae44fcb2d/protobuf-6.33.4-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c7c64f259c618f0bef7bee042075e390debbf9682334be2b67408ec7c1c09ee6", size = 339256, upload-time = "2026-01-12T18:33:35.231Z" }, + { url = "https://files.pythonhosted.org/packages/e8/8e/971c0edd084914f7ee7c23aa70ba89e8903918adca179319ee94403701d5/protobuf-6.33.4-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:3df850c2f8db9934de4cf8f9152f8dc2558f49f298f37f90c517e8e5c84c30e9", size = 323311, upload-time = "2026-01-12T18:33:36.305Z" }, + { url = "https://files.pythonhosted.org/packages/75/b1/1dc83c2c661b4c62d56cc081706ee33a4fc2835bd90f965baa2663ef7676/protobuf-6.33.4-py3-none-any.whl", hash = "sha256:1fe3730068fcf2e595816a6c34fe66eeedd37d51d0400b72fabc848811fdc1bc", size = 170532, upload-time = "2026-01-12T18:33:39.199Z" }, +] + [[package]] name = "pyasn1" version = "0.6.1" @@ -2215,15 +2207,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/47/8d/d529b5d697919ba8c11ad626e835d4039be708a35b0d22de83a269a6682c/pyasn1_modules-0.4.2-py3-none-any.whl", hash = 
"sha256:29253a9207ce32b64c3ac6600edc75368f98473906e8fd1043bd6b5b1de2c14a", size = 181259, upload-time = "2025-03-28T02:41:19.028Z" }, ] -[[package]] -name = "pycparser" -version = "2.23" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" }, -] - [[package]] name = "pydantic" version = "2.12.5" @@ -2336,23 +2319,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/36/c7/cfc8e811f061c841d7990b0201912c3556bfeb99cdcb7ed24adc8d6f8704/pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51", size = 2145302, upload-time = "2025-11-04T13:43:46.64Z" }, ] -[[package]] -name = "pygithub" -version = "2.4.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "deprecated" }, - { name = "pyjwt", extra = ["crypto"] }, - { name = "pynacl" }, - { name = "requests" }, - { name = "typing-extensions" }, - { name = "urllib3" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/f1/a0/1e8b8ca88df9857836f5bf8e3ee15dfb810d19814ef700b12f99ce11f691/pygithub-2.4.0.tar.gz", hash = "sha256:6601e22627e87bac192f1e2e39c6e6f69a43152cfb8f307cee575879320b3051", size = 3476673, upload-time = "2024-08-26T06:49:44.029Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0a/f3/e185613c411757c0c18b904ea2db173f2872397eddf444a3fe8cdde47077/PyGithub-2.4.0-py3-none-any.whl", hash = 
"sha256:81935aa4bdc939fba98fee1cb47422c09157c56a27966476ff92775602b9ee24", size = 362599, upload-time = "2024-08-26T06:49:42.351Z" }, -] - [[package]] name = "pygments" version = "2.19.2" @@ -2371,11 +2337,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/61/ad/689f02752eeec26aed679477e80e632ef1b682313be70793d798c1d5fc8f/PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb", size = 22997, upload-time = "2024-11-28T03:43:27.893Z" }, ] -[package.optional-dependencies] -crypto = [ - { name = "cryptography" }, -] - [[package]] name = "pylint" version = "4.0.3" @@ -2407,43 +2368,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/93/78/b93cb80bd673bdc9f6ede63d8eb5b4646366953df15667eb3603be57a2b1/pymdown_extensions-10.17.2-py3-none-any.whl", hash = "sha256:bffae79a2e8b9e44aef0d813583a8fea63457b7a23643a43988055b7b79b4992", size = 266556, upload-time = "2025-11-26T15:43:55.162Z" }, ] -[[package]] -name = "pynacl" -version = "1.6.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/b2/46/aeca065d227e2265125aea590c9c47fbf5786128c9400ee0eb7c88931f06/pynacl-1.6.1.tar.gz", hash = "sha256:8d361dac0309f2b6ad33b349a56cd163c98430d409fa503b10b70b3ad66eaa1d", size = 3506616, upload-time = "2025-11-10T16:02:13.195Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/75/d6/4b2dca33ed512de8f54e5c6074aa06eaeb225bfbcd9b16f33a414389d6bd/pynacl-1.6.1-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:7d7c09749450c385301a3c20dca967a525152ae4608c0a096fe8464bfc3df93d", size = 389109, upload-time = "2025-11-10T16:01:28.79Z" }, - { url = "https://files.pythonhosted.org/packages/3c/30/e8dbb8ff4fa2559bbbb2187ba0d0d7faf728d17cb8396ecf4a898b22d3da/pynacl-1.6.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = 
"sha256:fc734c1696ffd49b40f7c1779c89ba908157c57345cf626be2e0719488a076d3", size = 808254, upload-time = "2025-11-10T16:01:37.839Z" }, - { url = "https://files.pythonhosted.org/packages/44/f9/f5449c652f31da00249638dbab065ad4969c635119094b79b17c3a4da2ab/pynacl-1.6.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3cd787ec1f5c155dc8ecf39b1333cfef41415dc96d392f1ce288b4fe970df489", size = 1407365, upload-time = "2025-11-10T16:01:40.454Z" }, - { url = "https://files.pythonhosted.org/packages/eb/2f/9aa5605f473b712065c0a193ebf4ad4725d7a245533f0cd7e5dcdbc78f35/pynacl-1.6.1-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b35d93ab2df03ecb3aa506be0d3c73609a51449ae0855c2e89c7ed44abde40b", size = 843842, upload-time = "2025-11-10T16:01:30.524Z" }, - { url = "https://files.pythonhosted.org/packages/32/8d/748f0f6956e207453da8f5f21a70885fbbb2e060d5c9d78e0a4a06781451/pynacl-1.6.1-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dece79aecbb8f4640a1adbb81e4aa3bfb0e98e99834884a80eb3f33c7c30e708", size = 1445559, upload-time = "2025-11-10T16:01:33.663Z" }, - { url = "https://files.pythonhosted.org/packages/78/d0/2387f0dcb0e9816f38373999e48db4728ed724d31accdd4e737473319d35/pynacl-1.6.1-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:c2228054f04bf32d558fb89bb99f163a8197d5a9bf4efa13069a7fa8d4b93fc3", size = 825791, upload-time = "2025-11-10T16:01:34.823Z" }, - { url = "https://files.pythonhosted.org/packages/18/3d/ef6fb7eb072aaf15f280bc66f26ab97e7fc9efa50fb1927683013ef47473/pynacl-1.6.1-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:2b12f1b97346f177affcdfdc78875ff42637cb40dcf79484a97dae3448083a78", size = 1410843, upload-time = "2025-11-10T16:01:36.401Z" }, - { url = "https://files.pythonhosted.org/packages/e3/fb/23824a017526850ee7d8a1cc4cd1e3e5082800522c10832edbbca8619537/pynacl-1.6.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:e735c3a1bdfde3834503baf1a6d74d4a143920281cb724ba29fb84c9f49b9c48", size = 801140, upload-time = "2025-11-10T16:01:42.013Z" }, - { url = "https://files.pythonhosted.org/packages/5d/d1/ebc6b182cb98603a35635b727d62f094bc201bf610f97a3bb6357fe688d2/pynacl-1.6.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3384a454adf5d716a9fadcb5eb2e3e72cd49302d1374a60edc531c9957a9b014", size = 1371966, upload-time = "2025-11-10T16:01:43.297Z" }, - { url = "https://files.pythonhosted.org/packages/64/f4/c9d7b6f02924b1f31db546c7bd2a83a2421c6b4a8e6a2e53425c9f2802e0/pynacl-1.6.1-cp314-cp314t-win32.whl", hash = "sha256:d8615ee34d01c8e0ab3f302dcdd7b32e2bcf698ba5f4809e7cc407c8cdea7717", size = 230482, upload-time = "2025-11-10T16:01:47.688Z" }, - { url = "https://files.pythonhosted.org/packages/c4/2c/942477957fba22da7bf99131850e5ebdff66623418ab48964e78a7a8293e/pynacl-1.6.1-cp314-cp314t-win_amd64.whl", hash = "sha256:5f5b35c1a266f8a9ad22525049280a600b19edd1f785bccd01ae838437dcf935", size = 243232, upload-time = "2025-11-10T16:01:45.208Z" }, - { url = "https://files.pythonhosted.org/packages/7a/0c/bdbc0d04a53b96a765ab03aa2cf9a76ad8653d70bf1665459b9a0dedaa1c/pynacl-1.6.1-cp314-cp314t-win_arm64.whl", hash = "sha256:d984c91fe3494793b2a1fb1e91429539c6c28e9ec8209d26d25041ec599ccf63", size = 187907, upload-time = "2025-11-10T16:01:46.328Z" }, - { url = "https://files.pythonhosted.org/packages/49/41/3cfb3b4f3519f6ff62bf71bf1722547644bcfb1b05b8fdbdc300249ba113/pynacl-1.6.1-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:a6f9fd6d6639b1e81115c7f8ff16b8dedba1e8098d2756275d63d208b0e32021", size = 387591, upload-time = "2025-11-10T16:01:49.1Z" }, - { url = "https://files.pythonhosted.org/packages/18/21/b8a6563637799f617a3960f659513eccb3fcc655d5fc2be6e9dc6416826f/pynacl-1.6.1-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:e49a3f3d0da9f79c1bec2aa013261ab9fa651c7da045d376bd306cf7c1792993", size = 798866, upload-time = "2025-11-10T16:01:55.688Z" }, - { url = 
"https://files.pythonhosted.org/packages/e8/6c/dc38033bc3ea461e05ae8f15a81e0e67ab9a01861d352ae971c99de23e7c/pynacl-1.6.1-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7713f8977b5d25f54a811ec9efa2738ac592e846dd6e8a4d3f7578346a841078", size = 1398001, upload-time = "2025-11-10T16:01:57.101Z" }, - { url = "https://files.pythonhosted.org/packages/9f/05/3ec0796a9917100a62c5073b20c4bce7bf0fea49e99b7906d1699cc7b61b/pynacl-1.6.1-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5a3becafc1ee2e5ea7f9abc642f56b82dcf5be69b961e782a96ea52b55d8a9fc", size = 834024, upload-time = "2025-11-10T16:01:50.228Z" }, - { url = "https://files.pythonhosted.org/packages/f0/b7/ae9982be0f344f58d9c64a1c25d1f0125c79201634efe3c87305ac7cb3e3/pynacl-1.6.1-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4ce50d19f1566c391fedc8dc2f2f5be265ae214112ebe55315e41d1f36a7f0a9", size = 1436766, upload-time = "2025-11-10T16:01:51.886Z" }, - { url = "https://files.pythonhosted.org/packages/b4/51/b2ccbf89cf3025a02e044dd68a365cad593ebf70f532299f2c047d2b7714/pynacl-1.6.1-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:543f869140f67d42b9b8d47f922552d7a967e6c116aad028c9bfc5f3f3b3a7b7", size = 817275, upload-time = "2025-11-10T16:01:53.351Z" }, - { url = "https://files.pythonhosted.org/packages/a8/6c/dd9ee8214edf63ac563b08a9b30f98d116942b621d39a751ac3256694536/pynacl-1.6.1-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a2bb472458c7ca959aeeff8401b8efef329b0fc44a89d3775cffe8fad3398ad8", size = 1401891, upload-time = "2025-11-10T16:01:54.587Z" }, - { url = "https://files.pythonhosted.org/packages/0f/c1/97d3e1c83772d78ee1db3053fd674bc6c524afbace2bfe8d419fd55d7ed1/pynacl-1.6.1-cp38-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:3206fa98737fdc66d59b8782cecc3d37d30aeec4593d1c8c145825a345bba0f0", size = 772291, upload-time = "2025-11-10T16:01:58.111Z" }, - { url = 
"https://files.pythonhosted.org/packages/4d/ca/691ff2fe12f3bb3e43e8e8df4b806f6384593d427f635104d337b8e00291/pynacl-1.6.1-cp38-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:53543b4f3d8acb344f75fd4d49f75e6572fce139f4bfb4815a9282296ff9f4c0", size = 1370839, upload-time = "2025-11-10T16:01:59.252Z" }, - { url = "https://files.pythonhosted.org/packages/30/27/06fe5389d30391fce006442246062cc35773c84fbcad0209fbbf5e173734/pynacl-1.6.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:319de653ef84c4f04e045eb250e6101d23132372b0a61a7acf91bac0fda8e58c", size = 791371, upload-time = "2025-11-10T16:02:01.075Z" }, - { url = "https://files.pythonhosted.org/packages/2c/7a/e2bde8c9d39074a5aa046c7d7953401608d1f16f71e237f4bef3fb9d7e49/pynacl-1.6.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:262a8de6bba4aee8a66f5edf62c214b06647461c9b6b641f8cd0cb1e3b3196fe", size = 1363031, upload-time = "2025-11-10T16:02:02.656Z" }, - { url = "https://files.pythonhosted.org/packages/dd/b6/63fd77264dae1087770a1bb414bc604470f58fbc21d83822fc9c76248076/pynacl-1.6.1-cp38-abi3-win32.whl", hash = "sha256:9fd1a4eb03caf8a2fe27b515a998d26923adb9ddb68db78e35ca2875a3830dde", size = 226585, upload-time = "2025-11-10T16:02:07.116Z" }, - { url = "https://files.pythonhosted.org/packages/12/c8/b419180f3fdb72ab4d45e1d88580761c267c7ca6eda9a20dcbcba254efe6/pynacl-1.6.1-cp38-abi3-win_amd64.whl", hash = "sha256:a569a4069a7855f963940040f35e87d8bc084cb2d6347428d5ad20550a0a1a21", size = 238923, upload-time = "2025-11-10T16:02:04.401Z" }, - { url = "https://files.pythonhosted.org/packages/35/76/c34426d532e4dce7ff36e4d92cb20f4cbbd94b619964b93d24e8f5b5510f/pynacl-1.6.1-cp38-abi3-win_arm64.whl", hash = "sha256:5953e8b8cfadb10889a6e7bd0f53041a745d1b3d30111386a1bb37af171e6daf", size = 183970, upload-time = "2025-11-10T16:02:05.786Z" }, -] - [[package]] name = "pypdfium2" version = "5.1.0" @@ -3421,7 +3345,10 @@ wheels = [ ] [[package]] -name = "yattag" -version = "1.16.1" +name = "zipp" +version = "3.23.0" source = { 
registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1c/1a/d3b2a2b8f843f5e7138471c4a5c9172ef62bb41239aa4371784b7448110c/yattag-1.16.1.tar.gz", hash = "sha256:baa8f254e7ea5d3e0618281ad2ff5610e0e5360b3608e695c29bfb3b29d051f4", size = 29069, upload-time = "2024-11-02T22:38:30.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +] From 4d130d189744bc116339b165248221211265c5d0 Mon Sep 17 00:00:00 2001 From: thomashebrard Date: Thu, 15 Jan 2026 13:23:06 +0100 Subject: [PATCH 2/2] fix lint --- api/security.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/security.py b/api/security.py index d679358..1159373 100644 --- a/api/security.py +++ b/api/security.py @@ -44,7 +44,7 @@ async def verify_jwt(credentials: Annotated[HTTPAuthorizationCredentials, Depend try: # Decode and verify the JWT token from the request - payload = jwt.decode( + payload = jwt.decode( # type: ignore[reportUnknownMemberType] token, jwt_secret, algorithms=[JWT_ALGORITHM],