diff --git a/.DS_Store b/.DS_Store new file mode 100644 index 0000000..959929b Binary files /dev/null and b/.DS_Store differ diff --git a/.gitignore b/.gitignore index cb03f4d..25cc347 100644 --- a/.gitignore +++ b/.gitignore @@ -1,11 +1,47 @@ -/venv +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# Virtual Environment +venv/ +.venv/ +ENV/ +env/ + +# IDE +.idea/ +.vscode/ +*.swp +*.swo +.cursor/ + +# Environment variables .env -src/__pycache__ -/crytic-export +# Project specific +/crytic-export /contracts.json /permissions.json -/results -# markdown generation outputs +results/ *.md -*.json \ No newline at end of file +logs/ +temp/ +.DS_Store \ No newline at end of file diff --git a/.python-version b/.python-version new file mode 100644 index 0000000..2c07333 --- /dev/null +++ b/.python-version @@ -0,0 +1 @@ +3.11 diff --git a/README.md b/README.md index aa9792e..ab31e47 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,14 @@ Create and activate a virtual Python environment, and install the required Pytho python3 -m venv venv source venv/bin/activate pip install -r requirements.txt +pip install -e . # to install the this repo as a package name permission_scanner +``` + +Or use uv + +```shell +uv venv +uv pip install -r requirements.txt ``` Copy the `.env.example` file to `.env` and, depending on the network where the contracts are deployed on, fill in your RPC provider's url and a valid block explorer api key. 
Then load the variables with @@ -49,7 +57,13 @@ source .env Then execute the scanner script with 🚀 ```shell -python src/main.py +python example/run_scanner.py +``` + +Or use uv + +```shell +uv run example/run_scanner.py ``` ### Results diff --git a/example/run_scanner.py b/example/run_scanner.py new file mode 100644 index 0000000..d6ddb83 --- /dev/null +++ b/example/run_scanner.py @@ -0,0 +1,90 @@ +import os +import json +import logging +from dotenv import load_dotenv +from permission_scanner import ContractScanner +from permission_scanner.utils.markdown_generator import generate_full_markdown + + +def load_config_from_file(file_path: str) -> dict: + """Load configuration from a JSON file. + + Args: + file_path (str): Path to the configuration file + + Returns: + dict: Configuration data + + Raises: + FileNotFoundError: If the config file doesn't exist + json.JSONDecodeError: If the config file is not valid JSON + """ + try: + with open(file_path, "r") as file: + return json.load(file) + except FileNotFoundError: + raise + except json.JSONDecodeError as e: + raise + + +def main(): + """Main function to run the contract scanner.""" + try: + # Load environment variables + load_dotenv() + + # Load contracts from json + config_json = load_config_from_file("temp/contracts_kodiak_error.json") + contracts_addresses = config_json["Contracts"] + project_name = config_json["Project_Name"] + chain_name = config_json["Chain_Name"] + + # Setup environment variables + block_explorer_api_key = os.getenv("BERASCAN_API_KEY") + rpc_url = os.getenv("RPC_URL") + + if not block_explorer_api_key or not rpc_url: + raise ValueError("Missing required environment variables") + + export_dir = f"temp/kodiak_error" + + # Scan each contract + all_scan_results = {} + all_contract_data_for_markdown = [] + + for address in contracts_addresses: + # initiate scanner for each address + scanner = ContractScanner( + project_name=project_name, + address=address, + chain_name=chain_name, + 
block_explorer_api_key=block_explorer_api_key, + rpc_url=rpc_url, + export_dir=export_dir, + ) + final_result, contract_data_for_markdown = scanner.scan() + all_scan_results.update(final_result) + all_contract_data_for_markdown += contract_data_for_markdown + + report_dir = f"{export_dir}/{project_name}-reports" + os.makedirs(report_dir, exist_ok=True) + permissions_json_path = os.path.join(report_dir, "permissions.json") + markdown_path = os.path.join(report_dir, "markdown.md") + + # save permissions.json + with open(permissions_json_path, "w") as f: + json.dump(all_scan_results, f, indent=4) + + # save markdown.md + markdown_content = generate_full_markdown( + project_name, all_contract_data_for_markdown, all_scan_results + ) + with open(markdown_path, "w") as f: + f.write(markdown_content) + except Exception as e: + raise + + +if __name__ == "__main__": + main() diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..9adcd38 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,14 @@ +[project] +name = "permission_scanner" +version = "0.1.0" +description = "Add your description here" +readme = "README.md" +requires-python = ">=3.11" +dependencies = [] + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build.targets.wheel] +packages = ["src/permission_scanner"] diff --git a/src/etherscan.py b/src/etherscan.py deleted file mode 100644 index dff298b..0000000 --- a/src/etherscan.py +++ /dev/null @@ -1,47 +0,0 @@ -import os - -import urllib.request -import urllib.parse -import json - -def get_etherscan_url() -> str: - etherscan_url = os.getenv("ETHERSCAN_API_KEY") - - if etherscan_url is None: - raise KeyError("Please set a etherscan api key in your .env") - - return etherscan_url - - -def fetch_contract_metadata(address, apikey, chainid=1): - base_url = "https://api.etherscan.io/v2/api" - params = { - "chainid": chainid, - "module": "contract", - "action": "getsourcecode", - "address": address, - "apikey": 
apikey - } - url = f"{base_url}?{urllib.parse.urlencode(params)}" - - try: - with urllib.request.urlopen(url) as response: - if response.status != 200: - raise Exception(f"HTTP error {response.status}") - data = json.load(response) - except Exception as e: - raise RuntimeError(f"Request failed: {e}") - - if data.get("status") != "1": - raise ValueError(f"API error: {data.get('message', 'Unknown error')}") - - result = data.get("result", []) - if not result: - raise ValueError("No contract data found") - - contract_info = result[0] - return { - "ContractName": contract_info.get("ContractName"), - "Proxy": contract_info.get("Proxy") == "1", - "Implementation": contract_info.get("Implementation") - } diff --git a/src/get_rpc_url.py b/src/get_rpc_url.py deleted file mode 100644 index 476b91a..0000000 --- a/src/get_rpc_url.py +++ /dev/null @@ -1,76 +0,0 @@ -import os - -def get_rpc_url(network: str) -> str: - rpc_urls = { - "mainnet": os.getenv("MAINNET_RPC"), - "bsc": os.getenv("BSC_RPC"), - "poly": os.getenv("POLYGON_RPC"), - "polyzk": os.getenv("POLYGON_ZK_RPC"), - "base": os.getenv("BASE_RPC"), - "arbi": os.getenv("ARBITRUM_RPC"), - "nova.arbi": os.getenv("NOVA_ARBITRUM_RPC"), - "linea": os.getenv("LINEA_RPC"), - "ftm": os.getenv("FANTOM_RPC"), - "blast": os.getenv("BLAST_RPC"), - "optim": os.getenv("OPTIMISTIC_RPC"), - "avax": os.getenv("AVAX_RPC"), - "bttc": os.getenv("BTTC_RPC"), - "celo": os.getenv("CELO_RPC"), - "cronos": os.getenv("CRONOS_RPC"), - "frax": os.getenv("FRAX_RPC"), - "gno": os.getenv("GNOSIS_RPC"), - "kroma": os.getenv("KROMA_RPC"), - "mantle": os.getenv("MANTLE_RPC"), - "moonbeam": os.getenv("MOONBEAM_RPC"), - "moonriver": os.getenv("MOONRIVER_RPC"), - "opbnb": os.getenv("OPBNB_RPC"), - "scroll": os.getenv("SCROLL_RPC"), - "taiko": os.getenv("TAIKO_RPC"), - "wemix": os.getenv("WEMIX_RPC"), - "era.zksync": os.getenv("ZKSYNC_ERA_RPC"), - "xai": os.getenv("XAI_RPC"), - } - - rpc_url = rpc_urls.get(network) - - if rpc_url is None: - raise 
KeyError(f"Network '{network}' not found in pre-configured chains. Please set your network in get_rpc_url.py") - - return rpc_url - - -def get_chain_id(network: str) -> int: - chain_ids = { - "mainnet": 1, - "bsc": 56, - "poly": 137, - "polyzk": 1101, - "base": 8453, - "arbi": 42161, - "nova.arbi": 42170, - "linea": 59144, - "ftm": 250, - "blast": 81457, - "optim": 10, - "avax": 43114, - "bttc": 199, - "celo": 42220, - "cronos": 25, - "frax": 252, - "gno": 100, - "kroma": 255, - "mantle": 5000, - "moonbeam": 1284, - "moonriver": 1285, - "opbnb": 204, - "scroll": 534352, - "taiko": 167000, - "wemix": 1111, - "era.zksync": 324, - "xai": 660279, - } - - if network not in chain_ids: - raise ValueError(f"Unknown network name: {network}") - - return chain_ids[network] diff --git a/src/main.py b/src/main.py deleted file mode 100644 index f11634d..0000000 --- a/src/main.py +++ /dev/null @@ -1,326 +0,0 @@ -from slither.slither import Slither -from slither.core.declarations.function import Function -from slither.core.declarations.contract import Contract - -from slither.tools.read_storage.read_storage import SlitherReadStorage, RpcInfo, get_storage_data - -import json -from typing import List -import urllib.error - -from parse import init_args -from get_rpc_url import get_rpc_url, get_chain_id -from etherscan import get_etherscan_url, fetch_contract_metadata -from dotenv import load_dotenv - -import re - -from markdown_generator import generate_full_markdown - - -def load_config_from_file(file_path: str) -> dict: - with open(file_path, 'r') as file: - return json.load(file) - - -def is_valid_eth_address(address: str) -> bool: - return bool(re.fullmatch(r"0x[a-fA-F0-9]{40}", address)) - -# check for msg.sender checks -def get_msg_sender_checks(function: Function) -> List[str]: - all_functions = ( - [f for f in function.all_internal_calls() if isinstance(f, Function)] - + [m for f in function.all_internal_calls() if isinstance(f, Function) for m in f.modifiers] - + [function] 
- + [m for m in function.modifiers if isinstance(m, Function)] - + [call for call in function.all_library_calls() if isinstance(call, Function)] - + [m for call in function.all_library_calls() if isinstance(call, Function) for m in call.modifiers] - ) - - all_nodes_ = [f.nodes for f in all_functions] - all_nodes = [item for sublist in all_nodes_ for item in sublist] - - all_conditional_nodes = [ - n for n in all_nodes if n.contains_if() or n.contains_require_or_assert() - ] - all_conditional_nodes_on_msg_sender = [ - str(n.expression) - for n in all_conditional_nodes - if "msg.sender" in [v.name for v in n.solidity_variables_read] - ] - return all_conditional_nodes_on_msg_sender - - -def get_permissions(contract: Contract, result: dict, all_state_variables_read: List[str], isProxy: bool, index: int): - - temp = { - "Contract_Name": contract.name, - "Functions": [] - } - - for function in contract.functions: - # 1) list all modifiers in function - # for output analysis - modifiers = function.modifiers - for call in function.all_internal_calls(): - if isinstance(call, Function): - modifiers += call.modifiers - for call in function.all_library_calls(): - if isinstance(call, Function): - modifiers += call.modifiers - - listOfModifiers = sorted([m.name for m in set(modifiers)]) - - - # 2) detect conditions on msg.sender - # in the full function scope - msg_sender_condition = get_msg_sender_checks(function) - - if (len(modifiers) == 0 and len(msg_sender_condition) == 0): - # no permission detected - continue - - # list all state variables that are read - # the variables available in storage will be read - state_variables_read_inside_modifiers = [ - v.name - for modifier in modifiers if modifier is not None - for v in modifier.all_variables_read() if v is not None and v.name - ] - - state_variables_read_inside_function = [ - v.name for v in function.all_state_variables_read() if v.name - ] - - all_state_variables_read_this_func = [] - 
all_state_variables_read_this_func.extend(state_variables_read_inside_modifiers) - all_state_variables_read_this_func.extend(state_variables_read_inside_function) - all_state_variables_read_this_func = list(set(all_state_variables_read_this_func)) - - all_state_variables_read.extend(all_state_variables_read_this_func) - - # 3) list all state variables that are written to inside this function - state_variables_written = [ - v.name for v in function.all_state_variables_written() if v.name - ] - - # 4) write everything to dict - temp['Functions'].append({ - "Function": function.name, - "Modifiers": listOfModifiers, - "msg.sender_conditions": msg_sender_condition, - "state_variables_read": all_state_variables_read_this_func, - "state_variables_written": state_variables_written - }) - - # dump to result dict - if isProxy and index == 0: - result["proxy_permissions"] = temp - elif isProxy and index == 1: - result["permissions"] = temp - else: - # is normal contract - result["permissions"] = temp - -def main(): - load_dotenv() # Load environment variables from .env file - - # load contracts from json - config_json = load_config_from_file("contracts.json") - - contracts_addresses = config_json["Contracts"] - contract_data_for_markdown = [] - project_name = config_json["Project_Name"] - chain_name = config_json["Chain_Name"] - - rpc_url = get_rpc_url(chain_name) - platform_key = get_etherscan_url() - - result = {} - - # instantiate slither rpc class - rpc_info = RpcInfo(rpc_url, "latest") - - for contract_address in contracts_addresses: - contract_result = fetch_contract_metadata(address=contract_address, apikey=platform_key, chainid=get_chain_id(chain_name)) - contract_name = contract_result["ContractName"] - isProxy = contract_result["Proxy"] == 1 - implementation_address = contract_result["Implementation"] - implementation_name = "" - contract_data_for_markdown.append({"name": contract_name, "address": contract_address}) - - if isProxy and implementation_address: - if 
not isinstance(implementation_address, str) or not is_valid_eth_address(implementation_address): - raise ValueError(f"Invalid implementation address for proxy: {implementation_address}") - try: - implementation_result = fetch_contract_metadata( - address=implementation_address, - apikey=platform_key, - chainid=get_chain_id(chain_name) - ) - implementation_name = implementation_result.get("ContractName") or "" - contract_data_for_markdown.append({"name": implementation_name, "address": implementation_address}) - except Exception as e: - raise f"Failed to get Implementation contract from Etherscan. \n\n\n + {e}" - - - target_storage_vars = [] # target storage variables of this contract - temp_global = {} - - # setup args for slither - args = init_args(project_name, contract_address, chain_name, rpc_url, platform_key, contract_name) - target = args.contract_source - - try: - slither = Slither(target, **vars(args)) - except urllib.error.HTTPError as e: - print(f"\033[33mFailed to compile contract at {contract_address} due to HTTP error: {e}\033[0m") - continue # Skip this contract and move to the next one - except Exception as e: - print(f"\033[33mAn error occurred while analyzing {contract_address}: {e}\033[0m") - continue - - # retrieved contracts from the address (inherited and interacted contracts) - contracts = slither.contracts - - # only take the one contract that is in the key - # this filters out interacted contracts (we dont need the permissions of them) - # does not exclude inherited contracts - target_contract = [contract for contract in contracts if contract.name == contract_name] - - if len(target_contract) == 0: - raise Exception(f"\033[31m\n \nThe contract name supplied in contract.json does not match any of the found contract names for this address: {contract_address}\033[0m") - - srs = SlitherReadStorage(target_contract, args.max_depth, rpc_info) - srs.unstructured = False - # Remove target prefix "mainnet:" e.g. mainnet:0x0 -> 0x0. 
- address = target[target.find(":") + 1 :] - srs.storage_address = address - - if isProxy: - # step 1: create slither object again, but with implementation address - # -> run analysis of storage layout and permissions of implementation address - # step 2: read storage from proxy contract (location of storage) contract_address["address"] - - # scan the implementation address - slither = Slither(f'{chain_name}:{implementation_address}', **vars(args)) - - # get all the instantiated contracts (includes also interacted contracts) from the implementation contract - implementation_contracts = slither.contracts_derived - - # find the instantiated/main implementation contract - - target_contract.extend([contract for contract in implementation_contracts if contract.name == implementation_name]) - if len(target_contract) == 1: - raise Exception(f"\033[31m\n \nThe implementation name supplied in contract.json does not match any of the found implementation contract names for this address: {contract_address['address']}\033[0m") - temp_global["Implementation_Address"] = implementation_address - temp_global["Proxy_Address"] = contract_address - - if not isProxy: - temp_global["Address"] = contract_address - - # end setup - ################################################## - ################################################## - ################################################## - - ################################################## - ################################################## - ################################################## - # start analysis - - - # start analysis of main contract (can be proxy, then also the implementation contract is analysed) - for i, contract in enumerate(target_contract): - # get permissions and store inside target_storage_vars - get_permissions(contract, temp_global, target_storage_vars, isProxy, i) - - target_storage_vars = list(set(target_storage_vars)) # remove duplicates - - # Three steps to retrieve storage variables with slither - # 1. 
set target variables - # 2. compute storage keys - # 3. retrieve slots from the keys - - # sets target variables - # adapted logic, extracted from method `get_all_storage_variables` of SlitherReadStorage class - for contract in srs._contracts: - for var in contract.state_variables_ordered: - if var.name in target_storage_vars: - # achieve step 1. - srs._target_variables.append((contract, var)) - - # add all constant and immutable variable to a list to do the required look-up - if not var.is_stored: - - # functionData is a dict - for functionData in temp_global["permissions"]["Functions"]: - # check if e.g storage variable owner is part of this function - if var.name in functionData["state_variables_read"]: - # check if already added some constants/immutables - - # Ensure key exists - if "immutables_and_constants" not in functionData: - functionData["immutables_and_constants"] = [] - - # Check if the variable has an expression and is not the proxy marker - if var.expression and str(var.expression) != "0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc": - try: - raw_value = get_storage_data( - srs.rpc_info.web3, contract_address, - str(var.expression), srs.rpc_info.block - ) - value = srs.convert_value_to_type(raw_value, 160, 0, "address") - functionData["immutables_and_constants"].append( - {"name": var.name, "slot": str(var.expression), "value": value} - ) - except Exception: - functionData["immutables_and_constants"].append( - {"name": var.name, "slot": str(var.expression)} - ) - else: - functionData["immutables_and_constants"].append({"name": var.name}) - - # step 2. computes storage keys for target variables - srs.get_target_variables() - - # step 3. 
get the values of the target variables and their slots - try: - srs.walk_slot_info(srs.get_slot_values) - except urllib.error.HTTPError as e: - print(f"\033[33mFailed to fetch storage from contract at {contract_address} due to HTTP error: {e}\033[0m") - continue # Skip this contract and move to the next one - except Exception as e: - print(f"\033[33mAn error occurred while fetching storage slots from contract {contract_address}: {e}\033[0m") - continue - - storageValues = {} - # merge storage retrieval with contracts - for key, value in srs.slot_info.items(): - contractDict = temp_global["permissions"] - storageValues[value.name] = value.value - # contractDict["Functions"] is a list, functionData a dict - for functionData in contractDict["Functions"]: - # check if e.g storage variable owner is part of this function - if value.name in functionData["state_variables_read"]: - # if so, add a key value pair to the functionData object, to improve readability of report - functionData[value.name] = value.value - - if len(storageValues.values()): - contractDict["storage_values"] = storageValues - - if len(implementation_name) > 0: - result[implementation_name] = temp_global - else: - result[contract_name] = temp_global - - with open("permissions.json","w") as file: - json.dump(result, file, indent=4) - - content = generate_full_markdown("", contract_data_for_markdown, result) - - with open("markdown.md", "w") as file: - file.write(content) - - -main() diff --git a/src/parse.py b/src/parse.py deleted file mode 100644 index ac977fa..0000000 --- a/src/parse.py +++ /dev/null @@ -1,42 +0,0 @@ -from crytic_compile import cryticparser -from argparse import ArgumentParser, Namespace - - -def init_args(project_name: str, contract_address: str, chain_name: str, rpc_url: str, platform_key: str, contract_name: str) -> Namespace: - """Parse the underlying arguments for the program. - Returns: - The arguments for the program. 
- """ - - # create a ArgumentParser for cryticparser.init - parser = ArgumentParser( - description="Read a variable's value from storage for a deployed contract", - usage=("\nProvide secrets in env file\n"), - ) - - # Add arguments (this step is required before setting defaults) - parser.add_argument("--contract_source", nargs="+", help="Contract address or project directory") - parser.add_argument("--export-dir", help="where downloaded files should be stored") - parser.add_argument("--rpc-url", help="RPC endpoint URL") - parser.add_argument("--etherscan-api-key", help="Etherscan API key") - parser.add_argument("--max-depth", help="Max depth to search in data structure.", default=20) - parser.add_argument( - "--block", - help="The block number to read storage from. Requires an archive node to be provided as the RPC url.", - default="latest", - ) - - - # Set defaults for arguments programmatically - # Hyphens (-) in argument names are automatically converted to underscores (_) - parser.set_defaults( - contract_source=f'{chain_name}:{contract_address}', - rpc_url=rpc_url, - etherscan_api_key=platform_key, - export_dir=f'results/{project_name}/{contract_name}' - ) - - # requires a ArgumentParser instance - cryticparser.init(parser) - - return parser.parse_args() diff --git a/src/permission_scanner/__init__.py b/src/permission_scanner/__init__.py new file mode 100644 index 0000000..a20fbc5 --- /dev/null +++ b/src/permission_scanner/__init__.py @@ -0,0 +1,4 @@ +from .scanner.scanner import ContractScanner +from .utils.block_explorer import BlockExplorer + +__all__ = ["ContractScanner", "BlockExplorer"] diff --git a/src/permission_scanner/__pycache__/__init__.cpython-311.pyc b/src/permission_scanner/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..b5a9b5e Binary files /dev/null and b/src/permission_scanner/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/access_control.py b/src/permission_scanner/scanner/__init__.py similarity index 
100% rename from src/access_control.py rename to src/permission_scanner/scanner/__init__.py diff --git a/src/permission_scanner/scanner/__pycache__/__init__.cpython-311.pyc b/src/permission_scanner/scanner/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..99185b1 Binary files /dev/null and b/src/permission_scanner/scanner/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/permission_scanner/scanner/__pycache__/etherscan.cpython-311.pyc b/src/permission_scanner/scanner/__pycache__/etherscan.cpython-311.pyc new file mode 100644 index 0000000..0f59c26 Binary files /dev/null and b/src/permission_scanner/scanner/__pycache__/etherscan.cpython-311.pyc differ diff --git a/src/permission_scanner/scanner/__pycache__/scanner.cpython-311.pyc b/src/permission_scanner/scanner/__pycache__/scanner.cpython-311.pyc new file mode 100644 index 0000000..76ca1da Binary files /dev/null and b/src/permission_scanner/scanner/__pycache__/scanner.cpython-311.pyc differ diff --git a/src/permission_scanner/scanner/scanner.py b/src/permission_scanner/scanner/scanner.py new file mode 100644 index 0000000..4dff08d --- /dev/null +++ b/src/permission_scanner/scanner/scanner.py @@ -0,0 +1,394 @@ +from typing import List, Dict, Any, Tuple +import urllib.error +import re +import json +from slither import Slither +from slither.core.declarations.function import Function +from slither.core.declarations.contract import Contract +from slither.tools.read_storage.read_storage import ( + SlitherReadStorage, + RpcInfo, + get_storage_data, +) + +from ..utils.block_explorer import BlockExplorer + + +class ContractScanner: + """Service for scanning smart contracts for permissions and storage.""" + + def __init__( + self, + chain_name: str, + project_name: str, + address: str, + block_explorer_api_key: str, + rpc_url: str, + export_dir: str = "results", + ): + """Initialize the ContractScanner. 
+ + Args: + rpc_url (str): The RPC URL for the blockchain network + block_explorer (BlockExplorer): The block explorer instance for fetching contract metadata + export_dir (str): Directory to save Solidity files and crytic_compile.config.json + """ + self.project_name = project_name + self.address = address + self.chain_name = chain_name + self.block_explorer_api_key = block_explorer_api_key + self.rpc_url = rpc_url + self.export_dir = export_dir + self.permissions_results = {} + self.target_storage_vars = [] + self.contract_data_for_markdown = [] + self.scan_result = {} + self.implementation_name = None + self.block_explorer = BlockExplorer( + api_key=self.block_explorer_api_key, chain_name=chain_name + ) + + @staticmethod + def _is_valid_eth_address(address: str) -> bool: + """Check if a string is a valid Ethereum address.""" + return bool(re.fullmatch(r"0x[a-fA-F0-9]{40}", address)) + + @staticmethod + def _get_msg_sender_checks(function: Function) -> List[str]: + """Get all msg.sender checks in a function and its internal calls.""" + all_functions = ( + [f for f in function.all_internal_calls() if isinstance(f, Function)] + + [ + m + for f in function.all_internal_calls() + if isinstance(f, Function) + for m in f.modifiers + ] + + [function] + + [m for m in function.modifiers if isinstance(m, Function)] + + [ + call + for call in function.all_library_calls() + if isinstance(call, Function) + ] + + [ + m + for call in function.all_library_calls() + if isinstance(call, Function) + for m in call.modifiers + ] + ) + + all_nodes_ = [f.nodes for f in all_functions] + all_nodes = [item for sublist in all_nodes_ for item in sublist] + + all_conditional_nodes = [ + n for n in all_nodes if n.contains_if() or n.contains_require_or_assert() + ] + all_conditional_nodes_on_msg_sender = [ + str(n.expression) + for n in all_conditional_nodes + if "msg.sender" in [v.name for v in n.solidity_variables_read] + ] + return all_conditional_nodes_on_msg_sender + + def 
_scan_permissions(self, contract: Contract) -> Dict[str, Any]: + """Analyze permissions in a contract and store results. + + Args: + contract (Contract): The contract to analyze + all_state_variables_read (List[str]): List of state variables read + is_proxy (bool): Whether the contract is a proxy + index (int): Index for proxy/implementation contract + """ + result_dict = {"Contract_Name": contract.name, "Functions": []} + + for function in contract.functions: + # Get all modifiers + modifiers = function.modifiers + for call in function.all_internal_calls(): + if isinstance(call, Function): + modifiers += call.modifiers + for call in function.all_library_calls(): + if isinstance(call, Function): + modifiers += call.modifiers + + list_of_modifiers = sorted([m.name for m in set(modifiers)]) + + # Get msg.sender conditions + msg_sender_condition = self._get_msg_sender_checks(function) + + if len(modifiers) == 0 and len(msg_sender_condition) == 0: + continue + + # Get state variables read + state_variables_read_inside_modifiers = [ + v.name + for modifier in modifiers + if modifier is not None + for v in modifier.all_variables_read() + if v is not None and v.name + ] + + state_variables_read_inside_function = [ + v.name for v in function.all_state_variables_read() if v.name + ] + + all_state_variables_read_this_func = [] + all_state_variables_read_this_func.extend( + state_variables_read_inside_modifiers + ) + all_state_variables_read_this_func.extend( + state_variables_read_inside_function + ) + all_state_variables_read_this_func = list( + set(all_state_variables_read_this_func) + ) + + self.target_storage_vars.extend(all_state_variables_read_this_func) + + # Get state variables written + state_variables_written = [ + v.name for v in function.all_state_variables_written() if v.name + ] + + # Store results + result_dict["Functions"].append( + { + "Function": function.name, + "Modifiers": list_of_modifiers, + "msg.sender_conditions": msg_sender_condition, + 
"state_variables_read": all_state_variables_read_this_func, + "state_variables_written": state_variables_written, + } + ) + + return result_dict + + def _scan_storage( + self, + storage_scanner: SlitherReadStorage, + permissions_result: Dict[str, Any], + contract_address: str, + ) -> Dict[str, Any]: + """Scan contract storage. + + Args: + storage_scanner (SlitherReadStorage): Initialized storage scanner + permissions_result (Dict[str, Any]): Results from permission scan + contract_address (str): Contract address to scan + + Returns: + Dict[str, Any]: Storage analysis results + """ + # sets target variables + # adapted logic, extracted from method `get_all_storage_variables` of SlitherReadStorage class + for contract in storage_scanner._contracts: + for var in contract.state_variables_ordered: + if var.name in self.target_storage_vars: + # achieve step 1. + storage_scanner._target_variables.append((contract, var)) + + # add all constant and immutable variable to a list to do the required look-up + if not var.is_stored: + + # functionData is a dict + for functionData in permissions_result["Functions"]: + # check if e.g storage variable owner is part of this function + if var.name in functionData["state_variables_read"]: + # check if already added some constants/immutables + + # Ensure key exists + if "immutables_and_constants" not in functionData: + functionData["immutables_and_constants"] = [] + + # Check if the variable has an expression and is not the proxy marker + if ( + var.expression + and str(var.expression) + != "0x360894a13ba1a3210667c828492db98dca3e2076cc3735a920a3ca505d382bbc" + ): + try: + raw_value = get_storage_data( + storage_scanner.rpc_info.web3, + contract_address, + str(var.expression), + storage_scanner.rpc_info.block, + ) + value = storage_scanner.convert_value_to_type( + raw_value, 160, 0, "address" + ) + functionData["immutables_and_constants"].append( + { + "name": var.name, + "slot": str(var.expression), + "value": value, + } + ) + except 
Exception: + functionData["immutables_and_constants"].append( + {"name": var.name, "slot": str(var.expression)} + ) + else: + functionData["immutables_and_constants"].append( + {"name": var.name} + ) + + # step 2. computes storage keys for target variables + storage_scanner.get_target_variables() + + # step 3. get the values of the target variables and their slots + try: + storage_scanner.walk_slot_info(storage_scanner.get_slot_values) + except urllib.error.HTTPError as e: + print( + f"\033[33mFailed to fetch storage from contract at {contract_address} due to HTTP error: {e}\033[0m" + ) + except Exception as e: + print( + f"\033[33mAn error occurred while fetching storage slots from contract {contract_address}: {e}\033[0m" + ) + + storageValues = {} + # merge storage retrieval with contracts + for key, value in storage_scanner.slot_info.items(): + contractDict = permissions_result + storageValues[value.name] = value.value + # contractDict["Functions"] is a list, functionData a dict + for functionData in contractDict["Functions"]: + # check if e.g storage variable owner is part of this function + if value.name in functionData["state_variables_read"]: + # if so, add a key value pair to the functionData object, to improve readability of report + functionData[value.name] = value.value + + return storageValues + + def _check_proxy(self, contract_metadata: Dict[str, Any]): + """Handle proxy contract logic. 
+ + Args: + contract_metadata (Dict[str, Any]): Metadata of the contract + + Returns: + None: sets ``self.implementation_name`` and records the implementation + contract in ``self.contract_data_for_markdown`` as side effects. + """ + isProxy = contract_metadata["Proxy"] == 1 + implementation_address = contract_metadata["Implementation"] + implementation_name = None + + if isProxy and implementation_address: + if not isinstance( + implementation_address, str + ) or not self._is_valid_eth_address(implementation_address): + raise ValueError( + f"Invalid implementation address for proxy: {implementation_address}" + ) + try: + implementation_result = self.block_explorer.get_contract_metadata( + implementation_address + ) + implementation_name = implementation_result.get("ContractName", None) + self.implementation_name = implementation_name + self.contract_data_for_markdown.append( + {"name": implementation_name, "address": implementation_address} + ) + except Exception as e: + # raising a bare (f-)string is a TypeError in Python 3; wrap in a + # real exception and chain the cause so the original traceback survives + raise RuntimeError( + f"Failed to get Implementation contract from Etherscan: {e}" + ) from e + + def scan(self) -> Tuple[Dict[str, Any], List[Dict[str, Any]]]: + """Scan a contract for permissions and storage.
+ + Args: + contract_address (str): The contract address to scan + + Returns: + Dict[str, Any]: The scan results containing permissions and storage analysis + """ + final_scan_result = {} + contract_metadata = self.block_explorer.get_contract_metadata(self.address) + with open( + f"{self.export_dir}/{self.project_name}-contracts/contract_metadata.json", + "w", + ) as f: + json.dump(contract_metadata, f) + contract_name = contract_metadata["ContractName"] + isProxy = contract_metadata["Proxy"] == 1 + + self.contract_name = contract_name + self.contract_data_for_markdown.append( + {"name": contract_name, "address": self.address} + ) + self._check_proxy(contract_metadata) + + # Initialize scan_result structure + self.scan_result[contract_name] = {} + + slither = Slither( + f"{self.chain_name}:{self.address}", + export_dir=f"{self.export_dir}/{self.project_name}-contracts/{contract_name}", + allow_path=f"{self.export_dir}/{self.project_name}-contracts", + ) + + # Get target contract from slither + target_contract = [c for c in slither.contracts if c.name == contract_name] + if not target_contract: + raise ValueError(f"Contract {contract_name} not found in source code") + + # Initialize storage scanner + rpc_info = RpcInfo(self.rpc_url, "latest") + srs = SlitherReadStorage(target_contract, max_depth=5, rpc_info=rpc_info) + srs.unstructured = False + srs.storage_address = self.address + + # If proxy, scan implementation + if isProxy: + impl_address = contract_metadata["Implementation"] + impl_slither = Slither( + f"{self.chain_name}:{impl_address}", + export_dir=f"{self.export_dir}/{self.project_name}-contracts/{self.implementation_name}", + allow_path=f"{self.export_dir}/{self.project_name}-contracts", + ) + # Get implementation contract + impl_contracts = impl_slither.contracts_derived + + # find the instantiated/main implementation contract + target_contract.extend( + [ + contract + for contract in impl_contracts + if contract.name == self.implementation_name + ] + ) 
+ if len(target_contract) == 1: + raise Exception( + f"\033[31m\n \nThe implementation name supplied in contract.json does not match any of the found implementation contract names for this address: {self.address}\033[0m" + ) + self.scan_result["Implementation_Address"] = impl_address + self.scan_result["Proxy_Address"] = self.address + if not isProxy: + self.scan_result["Address"] = self.address + + for i, contract in enumerate(target_contract): + # get permissions and store inside target_storage_vars + _scan_permissions_result = self._scan_permissions(contract) + if isProxy and i == 0: + self.scan_result["proxy_permissions"] = _scan_permissions_result + else: + self.scan_result["permissions"] = _scan_permissions_result + + self.target_storage_vars = list( + set(self.target_storage_vars) + ) # remove duplicates + + # Scan storage + permissions_result = self.scan_result["permissions"] + storage_result = self._scan_storage(srs, permissions_result, self.address) + if len(storage_result.values()): + self.scan_result["storage_values"] = storage_result + + if self.implementation_name: + final_scan_result[self.implementation_name] = self.scan_result + else: + final_scan_result[self.contract_name] = self.scan_result + + return final_scan_result, self.contract_data_for_markdown diff --git a/src/permission_scanner/utils/__init__.py b/src/permission_scanner/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/permission_scanner/utils/__pycache__/__init__.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/__init__.cpython-311.pyc new file mode 100644 index 0000000..efa5ee5 Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/__init__.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/block_explorer.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/block_explorer.cpython-311.pyc new file mode 100644 index 0000000..6ea19ce Binary files /dev/null and 
b/src/permission_scanner/utils/__pycache__/block_explorer.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/contract.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/contract.cpython-311.pyc new file mode 100644 index 0000000..f6b7a3e Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/contract.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/etherscan.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/etherscan.cpython-311.pyc new file mode 100644 index 0000000..48057f9 Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/etherscan.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/function.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/function.cpython-311.pyc new file mode 100644 index 0000000..42e5502 Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/function.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/logger.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/logger.cpython-311.pyc new file mode 100644 index 0000000..0e94f33 Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/logger.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/markdown_generator.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/markdown_generator.cpython-311.pyc new file mode 100644 index 0000000..d58267e Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/markdown_generator.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/reporter.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/reporter.cpython-311.pyc new file mode 100644 index 0000000..4111be2 Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/reporter.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/reporters.cpython-311.pyc 
b/src/permission_scanner/utils/__pycache__/reporters.cpython-311.pyc new file mode 100644 index 0000000..6c8583c Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/reporters.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/scanner_config.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/scanner_config.cpython-311.pyc new file mode 100644 index 0000000..411ecdf Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/scanner_config.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/__pycache__/validators.cpython-311.pyc b/src/permission_scanner/utils/__pycache__/validators.cpython-311.pyc new file mode 100644 index 0000000..b55bcfe Binary files /dev/null and b/src/permission_scanner/utils/__pycache__/validators.cpython-311.pyc differ diff --git a/src/permission_scanner/utils/block_explorer.py b/src/permission_scanner/utils/block_explorer.py new file mode 100644 index 0000000..9069229 --- /dev/null +++ b/src/permission_scanner/utils/block_explorer.py @@ -0,0 +1,269 @@ +import requests +from typing import Dict, Optional, Any +import json +from pathlib import Path +import os +from json import JSONDecodeError + + +# Read the config file from the same directory as this script +with open(Path(__file__).parent / "block_explorer_config.json", "r") as f: + block_explore_config = json.load(f) + + +class BlockExplorer: + """Service for interacting with Etherscan API.""" + + def __init__(self, api_key: str, chain_name: str): + if not api_key: + raise ValueError("API key is required") + self.api_key = api_key + self.chain_name = chain_name + try: + self.base_url = block_explore_config[chain_name]["base_url"] + self.chainid = block_explore_config[chain_name].get("chainid", None) + except KeyError: + raise ValueError( + f"Unsupported chain: {chain_name}. 
Supported chains are: {', '.join(block_explore_config.keys())}" + ) + self.sourcecode = {} + + def _make_request(self, module: str, action: str, address: str) -> Dict[str, Any]: + """ + Make a request to the BeraScan API. + + Args: + module (str): The API module to call. + action (str): The action to perform. + address (str): The contract address. + chain_id (int, optional): The chain ID, etherscan has v2 api that supports 50+ chains + Returns: + dict: The API response data. + + Raises: + requests.exceptions.RequestException: If the API request fails. + ValueError: If the response indicates an error. + """ + params = { + "module": module, + "action": action, + "address": address, + "apikey": self.api_key, + } + if self.chainid: + params["chainid"] = self.chainid + + try: + with requests.get(self.base_url, params=params) as response: + if response.status_code != 200: + raise ValueError(f"API Error: {response.text}") + data = response.json() + except Exception as e: + raise RuntimeError(f"Request failed: {e}") + + if data["status"] != "1": + raise ValueError(f"API Error: {data.get('message', 'Unknown error')}") + + return data["result"] + + def fetch_source_code(self, address: str) -> Dict[str, Any]: + """ + Fetch the source code for a verified contract. + + Args: + contract_address (str): The address of the contract to fetch source code for. 
+ + Returns: + Dict[str, Any]: The contract source code information including: + - SourceCode: The actual source code + - ABI: The contract ABI + - ContractName: The name of the contract + - CompilerVersion: The compiler version used + - OptimizationUsed: Whether optimization was used + - Runs: Number of optimization runs + - ConstructorArguments: Constructor arguments + - Library: Library information + - LicenseType: The license type + - Proxy: Whether the contract is a proxy + - Implementation: Implementation address if proxy + - SwarmSource: Swarm source if available + """ + sourcecode = self.sourcecode.get(address, None) + if sourcecode is None: + result = self._make_request( + module="contract", + action="getsourcecode", + address=address, + ) + if isinstance(result, list) and len(result) > 0: + self.sourcecode[address] = result[0] + else: + raise ValueError(f"No source code found for contract {address}") + + def get_contract_metadata(self, address: str) -> Dict: + """ + Fetch contract metadata from Etherscan, + including contract name, proxy status, and implementation address. + """ + self.fetch_source_code(address) + all_data = self.sourcecode.get(address) + if all_data is None: + raise ValueError(f"No source code found for contract {address}") + metadata = { + "ContractName": all_data.get("ContractName"), + "Proxy": all_data.get("Proxy") == "1", + "Implementation": all_data.get("Implementation"), + # "CompilerVersion": all_data.get("CompilerVersion"), + # "Library": all_data.get("Library"), + } + return metadata + + def save_sourcecode(self, address: str, save_dir: str) -> str: + """ + Fetch contract source code from Berascan and save it locally. 
+ + Args: + address (str): The contract address + save_dir (str): Directory to save the source code + + Returns: + str: Path to the saved source code file + """ + self.fetch_source_code(address) + source_code = self.sourcecode[address]["SourceCode"] + contract_name = self.sourcecode[address]["ContractName"] + + # Create export directory + export_dir = os.path.join(save_dir, f"{address}-{contract_name}") + if not os.path.exists(export_dir): + os.makedirs(export_dir) + + # Handle different source code formats + dict_source_code = None + try: + # Try to parse as double-braced JSON + dict_source_code = json.loads(source_code[1:-1]) + assert isinstance(dict_source_code, dict) + except (JSONDecodeError, AssertionError): + try: + # Try to parse as single-braced JSON + dict_source_code = json.loads(source_code) + assert isinstance(dict_source_code, dict) + except (JSONDecodeError, AssertionError): + # Handle as single file + filename = os.path.join(export_dir, f"{contract_name}.sol") + with open(filename, "w", encoding="utf8") as f: + f.write(source_code) + return filename + + # Handle multiple files case + if "sources" in dict_source_code: + source_codes = dict_source_code["sources"] + else: + source_codes = dict_source_code + + filtered_paths = [] + for filename, source_code in source_codes.items(): + path_filename = Path(filename) + + # Only keep solidity files + if path_filename.suffix not in [".sol", ".vy"]: + continue + + # Handle contracts directory imports + if "contracts" == path_filename.parts[0] and not filename.startswith("@"): + path_filename = Path( + *path_filename.parts[path_filename.parts.index("contracts") :] + ) + + # Convert absolute paths to relative + if path_filename.is_absolute(): + path_filename = Path(*path_filename.parts[1:]) + + filtered_paths.append(path_filename.as_posix()) + path_filename_disk = Path(export_dir, path_filename) + + # Ensure path is within allowed directory + allowed_path = os.path.abspath(export_dir) + if ( + 
os.path.commonpath((allowed_path, os.path.abspath(path_filename_disk))) + != allowed_path + ): + raise IOError( + f"Path '{path_filename_disk}' is outside of the allowed directory: {allowed_path}" + ) + + # Create directory if needed + os.makedirs(path_filename_disk.parent, exist_ok=True) + + # Write file + with open(path_filename_disk, "w", encoding="utf8") as f: + f.write(source_code["content"]) + + # Handle remappings + remappings = dict_source_code.get("settings", {}).get("remappings", []) + if remappings: + remappings_path = os.path.join(export_dir, "remappings.txt") + with open(remappings_path, "w", encoding="utf8") as f: + for remapping in remappings: + if "=" in remapping: + origin, dest = remapping.split("=", 1) + # Always use a trailing slash for the destination + f.write(f"{origin}={str(Path(dest) / '_')[:-1]}\n") + + # Create metadata config + metadata_config = { + "solc_remaps": remappings if remappings else {}, + "solc_solcs_select": self.sourcecode[address].get("CompilerVersion", ""), + "solc_args": " ".join( + filter( + None, + [ + ( + "--via-ir" + if dict_source_code.get("settings", {}).get("viaIR") + else "" + ), + ( + f"--optimize --optimize-runs {self.sourcecode[address].get('Runs', '')}" + if self.sourcecode[address].get("OptimizationUsed") == "1" + else "" + ), + ( + f"--evm-version {self.sourcecode[address].get('EVMVersion')}" + if self.sourcecode[address].get("EVMVersion") + and self.sourcecode[address].get("EVMVersion") != "Default" + else "" + ), + ], + ) + ), + } + + with open( + os.path.join(export_dir, "crytic_compile.config.json"), "w", encoding="utf8" + ) as f: + json.dump(metadata_config, f) + + # Find main contract file + main_contract_path = None + for path in filtered_paths: + path_filename = Path(path) + if path_filename.stem == contract_name: + main_contract_path = os.path.join(export_dir, path) + break + elif path_filename.stem.lower() == contract_name.lower(): + main_contract_path = os.path.join(export_dir, path) + break + + # 
If no main contract found, use first .sol file + if main_contract_path is None: + for root, _, files in os.walk(export_dir): + for file in files: + if file.endswith(".sol"): + main_contract_path = os.path.join(root, file) + break + if main_contract_path: + break + + return main_contract_path diff --git a/src/permission_scanner/utils/block_explorer_config.json b/src/permission_scanner/utils/block_explorer_config.json new file mode 100644 index 0000000..0444a89 --- /dev/null +++ b/src/permission_scanner/utils/block_explorer_config.json @@ -0,0 +1,7 @@ +{ + "mainnet": { "base_url": "https://api.etherscan.io/v2/api", "chainid": 1 }, + "bsc": { "base_url": "https://api.etherscan.io/v2/api", "chainid": 56 }, + "berachain": { + "base_url": "https://api.berascan.com/api" + } +} diff --git a/src/markdown_generator.py b/src/permission_scanner/utils/markdown_generator.py similarity index 74% rename from src/markdown_generator.py rename to src/permission_scanner/utils/markdown_generator.py index 0461757..8f8bb9f 100644 --- a/src/markdown_generator.py +++ b/src/permission_scanner/utils/markdown_generator.py @@ -1,26 +1,21 @@ - def generate_contracts_table(contracts_object_list): - """ - """ + """ """ md_content = "## Contracts\n| Contract Name | Address |\n" md_content += "|--------------|--------------|\n" - + for contract in contracts_object_list: md_content += f"| {contract['name']} | {contract['address']} |\n" - # try: - # md_content += f"| {contract['implementation_name']} | ... 
|\n" - # except KeyError: - # # not a proxy but a standard contract - # pass - + return md_content + def generate_permissions_table(permissions): - """ - """ + """ """ md_content = "## Permission\n| Contract | Function | Impact | Owner |\n" - md_content += "|-------------|------------|-------------------------|-------------------|\n" - + md_content += ( + "|-------------|------------|-------------------------|-------------------|\n" + ) + for contract, entries in permissions.items(): try: proxy_permissions = entries["proxy_permissions"] @@ -29,11 +24,11 @@ def generate_permissions_table(permissions): for permissioned_function in permissioned_functions: owner = "" try: - owner = permissioned_function['_owner'] + owner = permissioned_function["_owner"] except KeyError: - owner = permissioned_function['Modifiers'] + owner = permissioned_function["Modifiers"] pass - + md_content += f"| {contract_name} | {permissioned_function['Function']} | ... | {owner} |\n" except KeyError: # just a normal contract @@ -45,18 +40,17 @@ def generate_permissions_table(permissions): for permissioned_function in permissioned_functions: owner = "" try: - owner = permissioned_function['_owner'] + owner = permissioned_function["_owner"] except KeyError: # no simple owner found - owner = permissioned_function['Modifiers'] + owner = permissioned_function["Modifiers"] pass - + md_content += f"| {contract_name} | {permissioned_function['Function']} | ... 
| {owner} |\n" - + return md_content -def generate_full_markdown(protocol_metadata, contracts, permissions) -> str: - - return f"{generate_contracts_table(contracts)}\n\n{generate_permissions_table(permissions)}" +def generate_full_markdown(protocol_metadata, contracts, permissions) -> str: + return f"{generate_contracts_table(contracts)}\n\n{generate_permissions_table(permissions)}" diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..d4b7daa --- /dev/null +++ b/uv.lock @@ -0,0 +1,8 @@ +version = 1 +revision = 2 +requires-python = ">=3.11" + +[[package]] +name = "permission-scanner" +version = "0.1.0" +source = { editable = "." }