forked from ReversecLabs/spikee
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathpyproject.toml
More file actions
37 lines (31 loc) · 1.45 KB
/
pyproject.toml
File metadata and controls
37 lines (31 loc) · 1.45 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
# PEP 517/518 build configuration: build via setuptools' in-tree backend.
[build-system]
requires = ["setuptools>=61.0", "wheel"]
build-backend = "setuptools.build_meta"
# Core project metadata (PEP 621).
[project]
name = "spikee"
# NOTE(review): "0.7.3-dev" is normalized by packaging tools to the canonical
# PEP 440 form "0.7.3.dev0"; consider writing the canonical form explicitly.
version = "0.7.3-dev"
description = "Spikee - Simple Prompt Injection Kit for Evaluation and Exploitation"
readme = "README.md"
# NOTE(review): no requires-python is declared — consider adding one so pip
# refuses unsupported interpreters instead of failing at runtime.
license = { text = "Apache-2.0" }
authors = [
    { name = "Reversec", email = "donato.capitella@reversec.com" },
]
keywords = ["prompt-injection", "LLM", "cyber-security", "pentesting"]
classifiers = [
    "License :: OSI Approved :: Apache Software License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
]
# Runtime dependencies: exact pins, one PEP 508 requirement per line, sorted.
# tomli is only needed on Python < 3.11 (stdlib tomllib exists from 3.11 on).
dependencies = [
    "InquirerPy==0.3.4",
    "any-llm-sdk==1.13.0",
    "datasets==4.8.4",
    "flask==3.1.3",
    "jinja2==3.1.6",
    "pandas==3.0.1",
    "python-dotenv==1.2.2",
    "requests==2.33.0",
    "rich==14.3.3",
    "tabulate==0.9.0",
    "tomli; python_version < '3.11'",
    "tqdm==4.67.3",
]
# Optional extras: pull in provider-specific any-llm-sdk backends on demand.
[project.optional-dependencies]
# Empty extra — presumably the base any-llm-sdk install already covers OpenAI,
# and the extra is kept so `pip install spikee[openai]` stays valid. TODO confirm.
openai = []
azure = ["any-llm-sdk[azure,azureopenai]==1.13.0"]
bedrock = ["any-llm-sdk[bedrock]==1.13.0"]
ollama = ["any-llm-sdk[ollama]==1.13.0"]
groq = ["any-llm-sdk[groq]==1.13.0"]
# NOTE(review): these are the only unpinned requirements in the file — confirm
# the loose pins are intentional (torch wheels are platform-sensitive).
local-inference = ["torch", "transformers", "sentencepiece"]
all = ["any-llm-sdk[azure,azureopenai,bedrock,ollama,groq]==1.13.0"]
# Console entry point: the `spikee` command dispatches to spikee.cli:main.
[project.scripts]
spikee = "spikee.cli:main"

# Bundle everything under spikee/data into the distribution.
[tool.setuptools.package-data]
spikee = ["data/**/*"]

# Package discovery: ship only the spikee package; keep repository working
# directories (generated datasets, results, user plugins, ...) out of the wheel.
[tool.setuptools.packages.find]
include = ["spikee*"]
exclude = [
    "datasets*",
    "results*",
    "targets*",
    "plugins*",
    "attacks*",
    "judges*",
    "templates*",
    "utilities*",
]