diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..620e845 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,3 @@ +[*.rs] +indent_style = space +indent_size = 2 \ No newline at end of file diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..3c0d9a5 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,52 @@ +name: CI + +on: + pull_request: + branches: [main, init] + workflow_dispatch: + +jobs: + build-and-test: + runs-on: ubuntu-latest + env: + WASM_PACK_VERSION: 0.12.1 + steps: + - uses: actions/checkout@v4 + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + profile: minimal + target: wasm32-unknown-unknown + override: true + - name: Cache cargo + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-cargo- + - name: Cache wasm-pack + id: cache-wasm-pack + uses: actions/cache@v4 + with: + path: ~/.cargo/bin/wasm-pack + key: ${{ runner.os }}-wasm-pack-${{ env.WASM_PACK_VERSION }} + - name: Ensure wasm-pack installed + run: | + if ! 
command -v wasm-pack >/dev/null 2>&1; then + echo "wasm-pack not found, installing $WASM_PACK_VERSION"; + curl -sL "https://github.com/rustwasm/wasm-pack/releases/download/v${WASM_PACK_VERSION}/wasm-pack-v${WASM_PACK_VERSION}-x86_64-unknown-linux-musl.tar.gz" \ + | tar xz -C /tmp + mv /tmp/wasm-pack-v${WASM_PACK_VERSION}-x86_64-unknown-linux-musl/wasm-pack ~/.cargo/bin/ + chmod +x ~/.cargo/bin/wasm-pack + else + echo "Found existing wasm-pack: $(wasm-pack --version)"; + fi + ls -l ~/.cargo/bin/ | grep wasm-pack || echo 'wasm-pack binary not listed' + - name: Native tests + run: cargo test --workspace --exclude source_map_parser_node --all-features + - name: WASM node tests + run: wasm-pack test --node crates/node_sdk diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..63f926e --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,212 @@ +name: Release + +on: + push: + tags: + - 'v*' + +permissions: + contents: write + packages: write + +env: + WASM_PACK_VERSION: 0.12.1 + +jobs: + verify-and-test: + runs-on: ubuntu-latest + outputs: + version: ${{ steps.extract.outputs.version }} + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Extract version from tag + id: extract + run: | + TAG="${GITHUB_REF_NAME}" + VERSION="${TAG#v}" + echo "version=$VERSION" >> $GITHUB_OUTPUT + echo "Tag version: $VERSION" + - name: Check crate versions match tag + run: | + TAG_VERSION=${{ steps.extract.outputs.version }} + CORE_VERSION=$(grep '^version' crates/source_map_parser/Cargo.toml | head -1 | cut -d '"' -f2) + NODE_VERSION=$(grep '^version' crates/node_sdk/Cargo.toml | head -1 | cut -d '"' -f2) + echo "core: $CORE_VERSION node: $NODE_VERSION tag: $TAG_VERSION" + if [ "$CORE_VERSION" != "$TAG_VERSION" ] || [ "$NODE_VERSION" != "$TAG_VERSION" ]; then + echo "Version mismatch. Please bump crate versions to $TAG_VERSION before tagging." 
>&2 + exit 1 + fi + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + profile: minimal + target: wasm32-unknown-unknown + override: true + - name: Cache cargo + uses: actions/cache@v4 + with: + path: | + ~/.cargo/registry + ~/.cargo/git + target + key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }} + restore-keys: ${{ runner.os }}-cargo- + - name: Cache wasm-pack + id: cache-wasm-pack + uses: actions/cache@v4 + with: + path: ~/.cargo/bin/wasm-pack + key: ${{ runner.os }}-wasm-pack-${{ env.WASM_PACK_VERSION }} + - name: Install wasm-pack + if: steps.cache-wasm-pack.outputs.cache-hit != 'true' + run: curl -sSf https://raw.githubusercontent.com/rustwasm/wasm-pack/master/docs/book/src/install.sh | bash -s -- -f -v $WASM_PACK_VERSION + - name: Run tests (native) + run: cargo test --workspace --exclude source_map_parser_node --all-features + - name: Run tests (wasm) + run: wasm-pack test --node crates/node_sdk + - name: Generate CHANGELOG + env: + REPO_URL: ${{ github.server_url }}/${{ github.repository }} + run: | + chmod +x scripts/generate-changelog.sh + ./scripts/generate-changelog.sh ${{ steps.extract.outputs.version }} "$REPO_URL" + - name: Upload changelog artifact + uses: actions/upload-artifact@v4 + with: + name: changelog + path: CHANGELOG.md + + publish-crates: + needs: verify-and-test + runs-on: ubuntu-latest + environment: release + steps: + - uses: actions/checkout@v4 + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + profile: minimal + override: true + - name: Cargo login + run: cargo login ${{ secrets.CARGO_REGISTRY_TOKEN }} + - name: Publish core crate + run: | + cargo publish -p source_map_parser --no-verify || echo "core crate already published" + - name: Publish node wasm crate + run: | + cargo publish -p source_map_parser_node --no-verify || echo "node crate already published" + + publish-npm: + needs: [verify-and-test] + runs-on: ubuntu-latest + env: + NODE_AUTH_TOKEN: ${{ 
secrets.NPM_TOKEN }} + steps: + - uses: actions/checkout@v4 + - name: Set up Rust + uses: actions-rs/toolchain@v1 + with: + toolchain: stable + profile: minimal + target: wasm32-unknown-unknown + override: true + - name: Cache wasm-pack + id: cache-wasm-pack + uses: actions/cache@v4 + with: + path: ~/.cargo/bin/wasm-pack + key: ${{ runner.os }}-wasm-pack-${{ env.WASM_PACK_VERSION }} + - name: Install wasm-pack + if: steps.cache-wasm-pack.outputs.cache-hit != 'true' + run: curl -sSf https://raw.githubusercontent.com/rustwasm/wasm-pack/master/docs/book/src/install.sh | bash -s -- -f -v $WASM_PACK_VERSION + - name: Build wasm package (node) + run: | + wasm-pack build crates/node_sdk --target nodejs --out-dir pkg --release + ls -l pkg + - name: Set npm auth + run: | + echo "//registry.npmjs.org/:_authToken=${NODE_AUTH_TOKEN}" > ~/.npmrc + - name: Publish to npm + run: | + cd pkg + npm publish --access public || echo "npm package already published" + + github-release: + needs: [publish-crates, publish-npm] + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - name: Download changelog artifact + uses: actions/download-artifact@v4 + with: + name: changelog + - name: Extract current release notes + id: notes + run: | + # Newest release section: from the first '## v' heading up to (excluding) the next one + awk '/^## v/{n++} n==1' CHANGELOG.md > notes.txt + - name: Publish GitHub Release + uses: softprops/action-gh-release@v2 + with: + tag_name: ${{ github.ref_name }} + name: source-map-parser ${{ github.ref_name }} + body_path: notes.txt + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + publish-gitlab: + needs: [verify-and-test] + runs-on: ubuntu-latest + env: + GITLAB_TOKEN: ${{ secrets.GITLAB_TOKEN }} + GITLAB_PROJECT_ID: ${{ secrets.GITLAB_PROJECT_ID }} + GITLAB_PROJECT_PATH: ${{ secrets.GITLAB_PROJECT_PATH }} + VERSION: ${{ needs.verify-and-test.outputs.version }} + steps: + - uses: 
actions/checkout@v4 + - name: Prepare artifacts + run: | + cargo package -p source_map_parser --allow-dirty + cargo package -p source_map_parser_node --allow-dirty + wasm-pack build crates/node_sdk --target nodejs --out-dir pkg --release + tar czf source_map_parser-${VERSION}.crate.tar.gz target/package/source_map_parser-*.crate + tar czf source_map_parser_node-${VERSION}.crate.tar.gz target/package/source_map_parser_node-*.crate + tar czf source_map_parser_node-wasm-${VERSION}.tgz -C pkg . + - name: Upload to GitLab generic packages + if: env.GITLAB_TOKEN != '' + run: | + for f in source_map_parser-${VERSION}.crate.tar.gz source_map_parser_node-${VERSION}.crate.tar.gz source_map_parser_node-wasm-${VERSION}.tgz; do + echo "Uploading $f" + curl --fail -H "PRIVATE-TOKEN: $GITLAB_TOKEN" --upload-file "$f" \ + "https://gitlab.com/api/v4/projects/${GITLAB_PROJECT_ID}/packages/generic/source-map-parser/${VERSION}/$f" + done + - name: Publish to GitLab npm registry + if: env.GITLAB_TOKEN != '' + run: | + if [ -z "$GITLAB_PROJECT_PATH" ]; then + echo "GITLAB_PROJECT_PATH secret not set, skip GitLab npm publish"; + exit 0; + fi + # Configure .npmrc for GitLab registry (avoid heredoc for YAML lint friendliness) + echo "@${GITLAB_PROJECT_PATH#*/}:registry=https://gitlab.com/api/v4/projects/${GITLAB_PROJECT_ID}/packages/npm/" > .npmrc + echo "//gitlab.com/api/v4/projects/${GITLAB_PROJECT_ID}/packages/npm/:_authToken=${GITLAB_TOKEN}" >> .npmrc + echo "always-auth=true" >> .npmrc + # Adjust package name for GitLab scope if needed + PACKAGE_JSON=pkg/package.json + if jq -e '.name' "$PACKAGE_JSON" >/dev/null 2>&1; then + ORIGINAL_NAME=$(jq -r '.name' $PACKAGE_JSON) + if [[ ! 
$ORIGINAL_NAME == @*/* ]]; then + # prepend scope from project path last segment + SCOPE="@${GITLAB_PROJECT_PATH##*/}" + TMP=$(mktemp) + jq --arg scope "$SCOPE" --arg name "$ORIGINAL_NAME" '.name = ($scope + "/" + $name)' $PACKAGE_JSON > $TMP && mv $TMP $PACKAGE_JSON + fi + fi + (cd pkg && npm publish --registry https://gitlab.com/api/v4/projects/${GITLAB_PROJECT_ID}/packages/npm/ || echo "GitLab npm publish skipped/failed") diff --git a/.gitignore b/.gitignore index ad67955..c2d9a94 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,11 @@ # Generated by Cargo # will have compiled files and executables -debug -target +debug/ +target/ + +# Remove Cargo.lock from gitignore if creating an executable, leave it for libraries +# More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html +Cargo.lock # These are backup files generated by rustfmt **/*.rs.bk @@ -9,13 +13,17 @@ target # MSVC Windows builds of rustc generate these, which store debugging information *.pdb -# Generated by cargo mutants -# Contains mutation testing data -**/mutants.out*/ -# RustRover -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
-#.idea/ +# Added by cargo + +/target + +.cache/ + +.DS_Store + +dist/ + +crates/js_sdk/pkg + +.vscode \ No newline at end of file diff --git a/.rustfmt.toml b/.rustfmt.toml new file mode 100644 index 0000000..b196eaa --- /dev/null +++ b/.rustfmt.toml @@ -0,0 +1 @@ +tab_spaces = 2 diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md new file mode 100644 index 0000000..500a87d --- /dev/null +++ b/CONTRIBUTORS.md @@ -0,0 +1,161 @@ +# Contributors Guide + +> 面向希望参与 `source-map-parser` 开发与发布的贡献者。本指南涵盖:开发环境、代码规范、测试、版本与发布流程、CI/CD、Changelog 生成、跨平台与多 Registry 发布(crates.io / npm / GitLab)。 + +## 快速开始 + +1. 克隆仓库并进入目录: + ```bash + git clone git@github.com:MasonChow/source-map-parser.git + cd source-map-parser + ``` +2. 安装 Rust stable(推荐使用 rustup,需 wasm 目标): + ```bash + rustup target add wasm32-unknown-unknown + ``` +3. 运行全部测试(本地): + ```bash + cargo test --workspace --exclude source_map_parser_node --all-features + wasm-pack test --node crates/node_sdk + ``` +4. (可选)启用缓存 / 构建加速:配置 `sccache` 并导出 `RUSTC_WRAPPER=sccache`。 + +## 代码结构 + +- `crates/source_map_parser`:核心逻辑 (stack 解析 / token 映射 / context snippet / error stack 映射)。 +- `crates/node_sdk`:WASM 导出 (Node 目标),通过 `wasm-bindgen` 提供 JS 可调用接口。 +- `scripts/`:自动化脚本(例如 `generate-changelog.sh`)。 +- `.github/workflows/`:CI / Release Pipelines。 + +## 开发约定 + +- 采用 Rust 2021 edition;保持 `clippy` clean(后续可加入强制检查)。 +- 测试命名:`*_test.rs` 或内联 `mod tests { ... 
}`;避免跨模块耦合。 + - 新功能需至少包含: + - 单元测试覆盖核心逻辑 + - 如影响 WASM API,补充 `node_sdk` 侧 wasm-bindgen 测试 + - 提交信息推荐遵循 Conventional Commits: + - `feat(scope): 描述` + - `fix(scope): 描述` + - `refactor: ...` / `perf: ...` / `docs: ...` / `test: ...` / `chore: ...` + - 破坏性变更:`feat!: ...` 或正文含 `BREAKING CHANGE:` + +## Changelog 生成逻辑 + +脚本:`scripts/generate-changelog.sh [repo_url]` + +- 自动检测最近 tag 与当前 HEAD 的提交区间 +- 解析 Conventional Commit type(scope)!: 描述 +- 输出分类 (Features / Fixes / Performance / Refactors / Docs / Tests / Build / CI / Style / Chore / Other / Breaking Changes) +- 自动生成 compare 链接(GitHub/GitLab) +- 提交哈希转为 commit 链接 + +### Mermaid:Changelog 生成流程 + +```mermaid +flowchart LR + %% 注:此前为 DOT (digraph) 语法,Mermaid 无法渲染,已改写为 flowchart + A[读取最新 tag] --> B[git log 范围] + B --> C[逐行解析 Conventional Commit] + C --> D{"匹配 type(scope)!"} + D --> E[分类聚合] + D --> F[检测 BREAKING] + E --> G[生成 Compare 链接] + F --> H[Breaking Section] + G --> I[写入新版本 Heading] + H --> I + I --> J[合并旧 CHANGELOG] +``` + +## 版本与 Tag 策略 + +- 手动 bump 两个 crate (`crates/source_map_parser` 与 `crates/node_sdk`) 版本号保持一致。 +- `node_sdk` 中对核心 crate 需显式 `version = "x.y.z"`,以便 crates.io 发布。 +- 创建 tag:`vX.Y.Z`;CI 中将校验 tag 与 crate versions 一致。 +- 建议遵循 SemVer: + - MINOR:新增功能向后兼容 + - PATCH:修复缺陷 + - MAJOR 或 feat!: 破坏性变更 + +## 发布流水线概览 + +触发:推送 `v*` tag。 +包含 Job:版本校验测试 → 发布 crates.io → 发布 npm (wasm-pack) → 发布 GitLab Generic Packages & GitLab npm → GitHub Release。 + +### Mermaid:Release Pipeline + +```mermaid +flowchart TB + start([Push tag vX.Y.Z]) --> verify["verify-and-test<br/>校验版本+测试+生成 CHANGELOG"] + verify --> crates["publish-crates<br/>cargo publish"] + verify --> npmPub["publish-npm<br/>wasm-pack + npm publish"] + verify --> gitlab["publish-gitlab<br/>generic + npm registry"] + crates --> release["github-release<br/>读取 changelog"] + npmPub --> release + gitlab --> release + release --> done([Release 完成]) +``` + +### GitLab 包与 npm Registry 发布 + +Job: `publish-gitlab` + +- 生成 artifacts:两个 crate 的 `.crate` 打包 + wasm 打包 `tgz` +- 上传到 Generic Packages:`/packages/generic/source-map-parser/<version>/...` +- 
若配置 GitLab npm:生成 `.npmrc` 并按需注入 scope 后 `npm publish` + +### Mermaid:GitLab npm 发布 + +```mermaid +sequenceDiagram + participant J as Job + participant S as Secrets + participant R as GitLab Registry + J->>S: 读取 GITLAB_TOKEN / PROJECT_ID / PROJECT_PATH + J->>J: wasm-pack build pkg + J->>J: 生成 .npmrc 指向 project npm registry + J->>J: scope 处理 (若包名无 @scope/ 前缀) + J->>R: npm publish + R-->>J: 201 Created / Already exists +``` + +## 必要 Secrets (GitHub Actions) + +| 名称 | 用途 | +| -------------------- | ---------------------------------- | +| CARGO_REGISTRY_TOKEN | 发布到 crates.io | +| NPM_TOKEN | 发布到 npm registry (官方) | +| GITLAB_TOKEN | 上传 Generic Packages / GitLab npm | +| GITLAB_PROJECT_ID | GitLab 项目 numeric id | +| GITLAB_PROJECT_PATH | GitLab 项目完整 path (用于 scope) | + +## 本地发布前检查清单 + +- [ ] 所有测试通过 (`cargo test`, `wasm-pack test --node`) +- [ ] Changelog 已根据提交适当书写(可运行脚本预览) +- [ ] crate 版本同步且未与已发布版本冲突 +- [ ] 提交消息符合规范(尤其是 Breaking Changes) +- [ ] README 与文档更新(如 API 变更) + +## 常见问题 (FAQ) + +1. Q: 发布时报 path dependency 错误? + A: 确认 `node_sdk` 中 `source_map_parser` 依赖包含 `version = "x.y.z"`。 +2. Q: npm 包名需要自定义? + A: 修改 wasm 构建产物前生成的 `pkg/package.json` 或在 GitLab job 中跳过重写逻辑。 +3. Q: 没有触发 Release? + A: 确认推送的是轻量 tag `vX.Y.Z` 且在默认远程 (origin) 上。 +4. Q: GitLab npm 发布失败? 
+ A: 检查 `GITLAB_PROJECT_PATH` 与 Token 权限 (write_package_registry)。 + +## 后续增强建议 + +- 引入 `cargo-deny` / `clippy` as CI gates +- 自动版本号 bump + 变更文件回写 (Release PR 模式) +- 多平台编译验证 (aarch64, windows) + sccache +- CHANGELOG 添加 commit diff 链接到每条目 (目前仅哈希链接) + +--- + +欢迎通过 Issue / PR 提交改进建议。🎉 diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000..36eb5c9 --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,13 @@ +[workspace] +members = ["crates/source_map_parser", "crates/node_sdk"] +resolver = "2" + +[workspace.package] +authors = ["MasonChow "] +description = "High performance Source Map stack trace parser & mapper (Rust + WASM)" +edition = "2021" +license = "MIT" + +[profile.release] +opt-level = 3 +debug = false diff --git a/README.md b/README.md index d14a3f0..845deed 100644 --- a/README.md +++ b/README.md @@ -1 +1,203 @@ -# source-map-parser \ No newline at end of file +# source_map_parser + +通用 Source Map 解析与源码还原核心库。支持: + +- JS 多引擎错误堆栈解析 (V8 / Firefox / Safari) +- SourceMap 位置映射还原原始行列 +- 解包全部源码 (sources + sourcesContent) +- 多端集成:Rust / Node (N-API) / WASM + +底层基于 Sentry 的 [sourcemap](https://crates.io/crates/sourcemap) 库,使用 Rust 实现高性能、纯计算、无 I/O 副作用能力。 + +## 核心 Rust API 快速示例 + +```rust +use source_map_parser::{SourceMapParserClient, stack_transform}; + +fn main() { + // 1. 解析堆栈 + let raw = "ReferenceError: x is not defined\n at foo (https://example.com/app.js:10:5)\n @https://example.com/app.js:20:15"; + let stacks = stack_transform::parse_stack_trace(raw); + + // 2. 加载 sourcemap (调用方自行读取文件/网络, 这里只是示例字符串) + let sm_bytes = br#"{\n \"version\":3,\n \"sources\":[\"src/a.js\"],\n \"sourcesContent\":[\"function add(a,b) {\\n return a+b;\\n}\\n\"],\n \"names\":[],\n \"mappings\": "AAAA"\n}"#; + let client = SourceMapParserClient::new(sm_bytes).unwrap(); + + // 3. 
定位首条堆栈 + if let Some(first) = stacks.first() { + if let Some(token) = client.lookup_token(first.line, first.column) { + println!("src: {:?} line:{} column:{}", token.src, token.line, token.column); + } + } + + // 4. 解包全部源码 + let all_sources = client.unpack_all_sources(); + for (path, code) in all_sources.iter() { println!("{} => {} bytes", path, code.len()); } +} +``` + +## 功能列表 + +- parse_stack_line / parse_stack_trace: 多格式堆栈解析 +- ErrorStack::from_raw: 保留首行错误消息 + 各帧 +- SourceMapParserClient::lookup_token: 映射编译后行列到原始源码位置 +- SourceMapParserClient::unpack_all_sources: 提取所有内嵌源码 +- SourceMapParserClient::lookup_context: 通用行列 -> 上下文源码片段 (非仅限错误堆栈) +- SourceMapParserClient::map_stack_line / map_stack_line_with_context: 直接传入单行堆栈文本解析并映射 +- SourceMapParserClient::map_stack_trace: 多行堆栈批量映射 +- SourceMapParserClient::map_error_stack: 带首行错误消息的整块错误堆栈映射,可选上下文 + +## 迁移指引 (从旧版 js-stack-parser) + +| 旧接口 | 新方式 | +| ----------------------------------------------------------- | ----------------------------------------- | +| generate_source_map_token(source_map_content, line, column) | SourceMapParserClient::new + lookup_token | +| 逐行 regex 手工解析 | parse_stack_trace / parse_stack_line | +| 自行解析 sourcesContent | SourceMapParserClient::unpack_all_sources | + +旧接口仍保留 (如 `token_generator::generate_source_map_token`) 以便平滑迁移。 + +## 设计原则 + +1. 纯计算,无网络 / 磁盘 I/O +2. 失败可恢复:无法解析的堆栈行直接跳过 +3. 面向多端封装:Rust Facade 保持稳定 API +4. 
性能优先:最小复制,延迟解析 + +## 后续计划 + +- SourceMap 缓存层 (LRU) +- 上下文代码可配置提取 API +- Node / WASM 新 Facade 封装 +- 性能基准 & 监测脚本 + +## 通用上下文查询示例 + +```rust +use source_map_parser::SourceMapParserClient; + +fn main() { + let sm = br#"{\n \"version\":3,\n \"sources\":[\"src/a.js\"],\n \"sourcesContent\":[\"fn1()\\nfn2()\\nfn3()\\n\\n\"],\n \"names\":[],\n \"mappings\": "AAAA"\n}"#; + let client = SourceMapParserClient::new(sm).unwrap(); + // 查询编译后第 1 行列 0 对应的原始代码, 带前后 1 行上下文 + if let Some(snippet) = client.lookup_context(1, 0, 1) { + for line in snippet.context { println!("{}{} {}", if line.is_target {">"} else {" "}, line.line, line.code); } + } +} +``` + +## WASM (Node) 导出快速使用 + +生成后的 Node 包 (`crates/node_sdk/pkg`) 暴露以下函数: + +| 函数 | 说明 | +| ------------------------------------------------------------ | -------------------------------------- | +| `lookup_token(sm, line, column)` | 获取原始行列 Token | +| `lookup_token_with_context(sm, line, column, context_lines)` | 获取带上下文 Token | +| `lookup_context(sm, line, column, context_lines)` | 仅获取上下文片段 (ContextSnippet) | +| `map_stack_line(sm, stack_line)` | 单行堆栈 -> Token | +| `map_stack_line_with_context(sm, stack_line, context_lines)` | 单行堆栈 -> 带上下文 Token | +| `map_stack_trace(sm, stack_trace)` | 多行堆栈批量映射 (不含首行错误消息) | +| `map_error_stack(sm, error_stack_raw, context_lines?)` | 整段错误堆栈 (含首行) 映射,可选上下文 | + +Node 端示例: + +```bash +node - <<'EOF' +const m = require('./crates/node_sdk/pkg'); +const sm = JSON.stringify({version:3,sources:['a.js'],sourcesContent:['fn()\n'],names:[],mappings:'AAAA'}); +console.log(JSON.parse(m.lookup_token(sm,1,0))); +EOF +``` + +## 开发 + +```bash +cargo test +``` + +### WASM 测试 (Node) + +使用 `wasm-pack test --node` 运行 `crates/node_sdk` 下的绑定测试: + +```bash +wasm-pack test --node crates/node_sdk +``` + +(不覆盖浏览器环境;如未来需要再扩展) + +CI 已提供 GitHub Actions 工作流 `.github/workflows/ci.yml`,覆盖: + +1. Rust 原生单元/集成测试 +2. 
Node 环境下 WASM 测试 (wasm-pack test --node) + +调试提示: + +- 确认已安装目标:`rustup target add wasm32-unknown-unknown` +- 避免直接 `cargo test --target wasm32-unknown-unknown`(缺少 runner 会尝试执行 .wasm 导致 126 错误) +- 添加新测试需使用 `#[wasm_bindgen_test]` 标注函数 + +更多多端使用方式: + +- [web/deno 使用方式](./crates/web_pkg/README.md) +- [rust 使用方式](./crates/source_map_parser/README.md) + +## 构建部署 + +本地或 CI 均可构建(标准 Rust + wasm-pack 工具链)。 + +- 安装 Rust 工具链:推荐使用 [rustup](https://rustup.rs/)。 +- 安装 wasm-pack:参考官方安装指引 [wasm-pack](https://rustwasm.github.io/wasm-pack/installer/)。 + +### 构建 + +```bash +bash build.sh +``` + +### WASM 构建 (Node 目标) + +当前仓库的 WASM 导出 crate 位于 `crates/node_sdk`,提供面向 Node.js (CommonJS) 的 API。根目录是一个 Cargo workspace(无 `[package]`),因此直接在根执行 `wasm-pack build` 会出现: + +``` +TOML parse error at line 1, column 1 +missing field `package` +``` + +请进入具体 crate 或使用提供的脚本: + +```bash +# 方式 1: 进入 crate 手动构建 +cd crates/node_sdk +wasm-pack build --target nodejs --release + +# 方式 2: 在仓库根使用脚本 +bash scripts/build-wasm-node.sh +``` + +构建输出目录:`crates/node_sdk/pkg`,包含 `package.json`, `.wasm`, 以及绑定 JS 文件,可直接 `require()` 使用。 + +快速 Node 端调用示例(假设在仓库根运行): + +```bash +node - <<'EOF' +const m = require('./crates/node_sdk/pkg'); +const sm = JSON.stringify({version:3,sources:['a.js'],sourcesContent:['fn()\n'],names:[],mappings:'AAAA'}); +console.log(JSON.parse(m.lookup_token(sm,1,0))); +EOF +``` + +(不提供浏览器 / ESM 目标,本分支仅关注 Node 使用) + +## TODO / Roadmap (扩展) + +- [ ] Node 原生 N-API 封装 (基于 napi-rs),提供更低调用开销 & Zero-Copy Buffer 传递 + - [ ] 新建 crate: `crates/node_napi` (napi-rs + feature gate) + - [ ] 暴露与 WASM 一致的高层 API (`lookup_token`, `map_error_stack` 等) + - [ ] Benchmark: N-API vs WASM (cold/warm 多次调用) + - [ ] 添加 TypeScript 声明 / 自动生成 d.ts +- [ ] SourceMap 缓存 LRU (可选容量 & 命中统计) +- [ ] CLI: `sourcemap-lookup` 支持批量堆栈文件解析 +- [ ] Web 目标 (独立 `web_sdk` crate) 支持 ESM + Tree-shaking +- [ ] 性能基准脚本 (criterion / Node benchmark) 与 README 指标展示 +- [ ] 发布流程脚本(版本号同步、npm publish、Git tag) diff --git a/crates/node_sdk/Cargo.toml 
b/crates/node_sdk/Cargo.toml new file mode 100644 index 0000000..a617290 --- /dev/null +++ b/crates/node_sdk/Cargo.toml @@ -0,0 +1,26 @@ +[package] +name = "source_map_parser_node" +description = "A WebAssembly package for source_map_parser" +version = "0.1.0" +authors.workspace = true +edition.workspace = true +license.workspace = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +wasm-bindgen = "0.2.89" +serde-wasm-bindgen = "0.6.5" +js-sys = "0.3" +source_map_parser = { path = "../source_map_parser", version = "0.1.0" } + +[lib] +crate-type = ["cdylib", "rlib"] + +[dev-dependencies] +wasm-bindgen-test = "0.3" + +[package.metadata.wasm-bindgen] +targets = ["wasm32-unknown-unknown"] diff --git a/crates/node_sdk/src/lib.rs b/crates/node_sdk/src/lib.rs new file mode 100644 index 0000000..baa7437 --- /dev/null +++ b/crates/node_sdk/src/lib.rs @@ -0,0 +1,255 @@ +use serde::Serialize; +use serde_json; +use wasm_bindgen::prelude::*; +use js_sys; +use source_map_parser::{stack_transform, token_generator, SourceMapParserClient}; // for Function type + +#[derive(Clone, Debug, Serialize)] +struct GenerateFailStack { + original_raw: String, + error_message: String, +} + +#[derive(Clone, Debug, Serialize)] +struct GenerateResult<'a> { + /// 堆栈信息 + stacks: Vec>, + /// 成功生成的 token + success: Vec, + /// 生成失败的任务 + fail: Vec, +} + +/// 解析整段错误堆栈并批量生成 token。 +/// +/// 参数说明: +/// stack_raw: 原始错误堆栈文本 +/// formatter: (可选) 回调,对每个堆栈里的 source_file 进行重写(如增加 .map 后缀或路径映射) +/// resolver: (可选) 回调,输入 (source_file_path:String) -> sourcemap 内容字符串; +/// 若未提供 resolver,将跳过该帧并记录失败。 +/// on_error: (可选) 失败回调 (stack_line_raw, error_message) +#[wasm_bindgen] +pub fn generate_token_by_stack_raw( + stack_raw: String, + formatter: Option, + resolver: Option, + on_error: Option, +) -> JsValue { + let error_stack = stack_transform::ErrorStack::from_raw(&stack_raw); 
+ let mut token_generator = token_generator::GenerateToken::new(); + let mut fail_stacks: Vec = Vec::new(); + + for stack in &error_stack.stacks { + let mut source_file_path = stack.source_file.to_string(); + + if let Some(format_fn) = formatter.as_ref() { + let param_source_file_path = JsValue::from_str(&source_file_path); + let result = format_fn.call1(&JsValue::null(), ¶m_source_file_path); + source_file_path = result.unwrap().as_string().unwrap(); + } + // 使用 resolver 获取 sourcemap 内容 + if let Some(resolver_fn) = resolver.as_ref() { + let path_val = JsValue::from_str(&source_file_path); + match resolver_fn.call1(&JsValue::null(), &path_val) { + Ok(content_val) => { + if let Some(content) = content_val.as_string() { + token_generator.add_task(token_generator::GenerateTask { + source_map_content: content, + line: stack.line, + column: stack.column, + source_line_offset: Some(5), + }); + } else { + let msg = "resolver did not return string".to_string(); + fail_stacks.push(GenerateFailStack { + original_raw: stack.original_raw.to_string(), + error_message: msg.clone(), + }); + if let Some(on_error) = on_error.as_ref() { + let _ = on_error.call2( + &JsValue::null(), + &JsValue::from_str(&stack.original_raw), + &JsValue::from_str(&msg), + ); + } + } + } + Err(err) => { + let err_str = err.as_string().unwrap_or_else(|| "resolver error".into()); + fail_stacks.push(GenerateFailStack { + original_raw: stack.original_raw.to_string(), + error_message: err_str.clone(), + }); + if let Some(on_error) = on_error.as_ref() { + let _ = on_error.call2( + &JsValue::null(), + &JsValue::from_str(&stack.original_raw), + &JsValue::from_str(&err_str), + ); + } + } + } + } else { + // 未提供 resolver + let msg = "no resolver provided".to_string(); + fail_stacks.push(GenerateFailStack { + original_raw: stack.original_raw.to_string(), + error_message: msg.clone(), + }); + if let Some(on_error) = on_error.as_ref() { + let _ = on_error.call2( + &JsValue::null(), + 
&JsValue::from_str(&stack.original_raw), + &JsValue::from_str(&msg), + ); + } + } + } + + token_generator.generate(); + + let result = GenerateResult { + stacks: error_stack.stacks.clone(), + success: token_generator.get_tokens(), + fail: fail_stacks, + }; + + let json = serde_json::to_string(&result).unwrap_or_else(|_| panic!("to_string failed")); + + JsValue::from_str(&json) +} + +#[wasm_bindgen] +pub fn generate_token_by_single_stack( + line: u32, + column: u32, + source_map_content: String, + context_offset: Option, +) -> JsValue { + let result: Option = + token_generator::get_stack_source(&source_map_content, line, column, context_offset); + + let json = serde_json::to_string(&result).unwrap_or_else(|_| "{\"error\":\"serialization failed\"}".to_string()); + + JsValue::from_str(&json) +} + +// ---------------- SourceMapParserClient 高层能力 WASM 导出 ---------------- + +#[derive(Serialize)] +struct WasmContextFrameLine { + line: u32, + is_target: bool, + code: String, +} + +#[derive(Serialize)] +struct WasmContextSnippet { + src: String, + line: u32, + column: u32, + context: Vec, +} + +#[wasm_bindgen] +pub fn lookup_token(source_map_content: &str, line: u32, column: u32) -> JsValue { + let client = match SourceMapParserClient::new(source_map_content.as_bytes()) { + Ok(c) => c, + Err(e) => return JsValue::from_str(&format!("{{\"error\":\"{}\"}}", e)), + }; + let tok = client.lookup_token(line, column); + JsValue::from_str(&serde_json::to_string(&tok).unwrap()) +} + +#[wasm_bindgen] +pub fn lookup_token_with_context( + source_map_content: &str, + line: u32, + column: u32, + context_lines: u32, +) -> JsValue { + let client = match SourceMapParserClient::new(source_map_content.as_bytes()) { + Ok(c) => c, + Err(e) => return JsValue::from_str(&format!("{{\"error\":\"{}\"}}", e)), + }; + let tok = client.lookup_token_with_context(line, column, context_lines); + JsValue::from_str(&serde_json::to_string(&tok).unwrap()) +} + +#[wasm_bindgen] +pub fn lookup_context( + 
source_map_content: &str, + line: u32, + column: u32, + context_lines: u32, +) -> JsValue { + let client = match SourceMapParserClient::new(source_map_content.as_bytes()) { + Ok(c) => c, + Err(e) => return JsValue::from_str(&format!("{{\"error\":\"{}\"}}", e)), + }; + let snippet = client + .lookup_context(line, column, context_lines) + .map(|s| WasmContextSnippet { + src: s.src, + line: s.line, + column: s.column, + context: s + .context + .into_iter() + .map(|l| WasmContextFrameLine { + line: l.line, + is_target: l.is_target, + code: l.code, + }) + .collect(), + }); + JsValue::from_str(&serde_json::to_string(&snippet).unwrap()) +} + +#[wasm_bindgen] +pub fn map_stack_line(source_map_content: &str, stack_line: &str) -> JsValue { + let client = match SourceMapParserClient::new(source_map_content.as_bytes()) { + Ok(c) => c, + Err(e) => return JsValue::from_str(&format!("{{\"error\":\"{}\"}}", e)), + }; + let tok = client.map_stack_line(stack_line); + JsValue::from_str(&serde_json::to_string(&tok).unwrap()) +} + +#[wasm_bindgen] +pub fn map_stack_line_with_context( + source_map_content: &str, + stack_line: &str, + context_lines: u32, +) -> JsValue { + let client = match SourceMapParserClient::new(source_map_content.as_bytes()) { + Ok(c) => c, + Err(e) => return JsValue::from_str(&format!("{{\"error\":\"{}\"}}", e)), + }; + let tok = client.map_stack_line_with_context(stack_line, context_lines); + JsValue::from_str(&serde_json::to_string(&tok).unwrap()) +} + +#[wasm_bindgen] +pub fn map_stack_trace(source_map_content: &str, stack_trace: &str) -> JsValue { + let client = match SourceMapParserClient::new(source_map_content.as_bytes()) { + Ok(c) => c, + Err(e) => return JsValue::from_str(&format!("{{\"error\":\"{}\"}}", e)), + }; + let list = client.map_stack_trace(stack_trace); + JsValue::from_str(&serde_json::to_string(&list).unwrap()) +} + +#[wasm_bindgen] +pub fn map_error_stack( + source_map_content: &str, + error_stack_raw: &str, + context_lines: Option, +) -> 
JsValue { + let client = match SourceMapParserClient::new(source_map_content.as_bytes()) { + Ok(c) => c, + Err(e) => return JsValue::from_str(&format!("{{\"error\":\"{}\"}}", e)), + }; + let mapped = client.map_error_stack(error_stack_raw, context_lines); + JsValue::from_str(&serde_json::to_string(&mapped).unwrap()) +} diff --git a/crates/node_sdk/tests/wasm_extended.rs b/crates/node_sdk/tests/wasm_extended.rs new file mode 100644 index 0000000..09a931a --- /dev/null +++ b/crates/node_sdk/tests/wasm_extended.rs @@ -0,0 +1,74 @@ +use source_map_parser_node::{ + generate_token_by_single_stack, generate_token_by_stack_raw, map_error_stack, map_stack_trace, +}; +use wasm_bindgen_test::*; + +// 仅 Node 环境测试 (wasm-pack test --node),不配置浏览器宏 + +fn sm_one(content: &str) -> String { + let esc = content.replace('\n', "\\n"); + format!("{{\"version\":3,\"file\":\"min.js\",\"sources\":[\"a.js\"],\"sourcesContent\":[\"{esc}\"],\"names\":[],\"mappings\":\"AAAA\"}}") +} + +#[wasm_bindgen_test] +fn single_stack_token_ok() { + let sm = sm_one("fn()\n"); + let v = generate_token_by_single_stack(1, 0, sm, Some(1)); + let s = v.as_string().unwrap(); + assert!(s.contains("source_code")); +} + +#[wasm_bindgen_test] +fn single_stack_token_none_when_invalid_line() { + let sm = sm_one("fn()\n"); + let v = generate_token_by_single_stack(0, 0, sm, None); + let s = v.as_string().unwrap(); + assert_eq!(s, "null"); +} + +#[wasm_bindgen_test] +fn generate_token_by_stack_raw_with_resolver() { + use js_sys::Function; + let stack_raw = "Error: x\n at foo (https://example.com/min.js:1:0)"; + let sm = sm_one("fn()\n"); + // formatter: identity + let formatter = Function::new_no_args("return arguments[0];"); + // resolver: always return the sm + let resolver = Function::new_with_args("p", &format!("return `{}`;", sm)); + let js = generate_token_by_stack_raw( + stack_raw.to_string(), + Some(formatter.clone()), + Some(resolver.clone()), + None, + ); + let s = js.as_string().unwrap(); + 
assert!(s.contains("success")); + assert!(s.contains("\"fail\":[]")); +} + +#[wasm_bindgen_test] +fn map_error_stack_with_context_some() { + let sm = sm_one("l0()\nl1()\n"); + let err = "Error: boom\n at foo (https://example.com/min.js:1:0)"; + let js = map_error_stack(&sm, err, Some(1)); + let s = js.as_string().unwrap(); + assert!(s.contains("frames_with_context")); +} + +#[wasm_bindgen_test] +fn map_error_stack_without_context() { + let sm = sm_one("l0()\nl1()\n"); + let err = "Error: boom\n at foo (https://example.com/min.js:1:0)"; + let js = map_error_stack(&sm, err, None); + let s = js.as_string().unwrap(); + assert!(s.contains("frames\"")); +} + +#[wasm_bindgen_test] +fn map_stack_trace_multi() { + let sm = sm_one("l0()\n"); + let trace = "at foo (https://example.com/min.js:1:0)\n@https://example.com/min.js:1:0"; + let js = map_stack_trace(&sm, trace); + let s = js.as_string().unwrap(); + assert!(s.starts_with("[")); +} diff --git a/crates/node_sdk/tests/wasm_smoke.rs b/crates/node_sdk/tests/wasm_smoke.rs new file mode 100644 index 0000000..498c8a3 --- /dev/null +++ b/crates/node_sdk/tests/wasm_smoke.rs @@ -0,0 +1,34 @@ +use source_map_parser_node::{lookup_token, lookup_token_with_context, map_stack_line}; +use wasm_bindgen_test::*; +// Node 环境:无需显式配置宏 (browser 专用) + +fn sample_sm() -> String { + // simple one-line mapping + let sm = r#"{"version":3,"file":"min.js","sources":["a.js"],"sourcesContent":["fn()\n"],"names":[],"mappings":"AAAA"}"#; + sm.to_string() +} + +#[wasm_bindgen_test] +fn lookup_basic() { + let sm = sample_sm(); + let v = lookup_token(&sm, 1, 0); + let s = v.as_string().unwrap(); + assert!(s.contains("\"line\":0")); +} + +#[wasm_bindgen_test] +fn lookup_with_context() { + let sm = sample_sm(); + let v = lookup_token_with_context(&sm, 1, 0, 1); + let s = v.as_string().unwrap(); + assert!(s.contains("source_code")); +} + +#[wasm_bindgen_test] +fn map_stack_line_smoke() { + let sm = sample_sm(); + let line = "at foo 
(https://example.com/min.js:1:0)"; + let v = map_stack_line(&sm, line); + let s = v.as_string().unwrap(); + assert!(s.contains("line")); +} diff --git a/crates/source_map_parser/Cargo.toml b/crates/source_map_parser/Cargo.toml new file mode 100644 index 0000000..0366fb2 --- /dev/null +++ b/crates/source_map_parser/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "source_map_parser" +description = "Parse and map JavaScript (and other) stack traces back to original source via source maps" +version = "0.1.0" +authors.workspace = true +edition.workspace = true +license.workspace = true + +[dependencies] +once_cell = "1.18.0" +pretty_assertions = "1.4.1" +regex = "1.10.2" +rstest = "0.23.0" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +sourcemap = "7.0.1" +thiserror = "1.0" diff --git a/crates/source_map_parser/README.md b/crates/source_map_parser/README.md new file mode 100644 index 0000000..caa6e6a --- /dev/null +++ b/crates/source_map_parser/README.md @@ -0,0 +1,70 @@ +# source_map_parser (原 js_stack_parser 核心能力迁移) + +通用 Source Map 解析与堆栈源码还原核心库。提供多引擎(V8 / Firefox / Safari)堆栈解析、行列映射、源码上下文提取、整段错误堆栈批量还原等能力。 + +## 安装 + +```toml +[dependencies] +source_map_parser = { git = "" } +``` + +## 快速开始 + +```rust +use source_map_parser::{SourceMapParserClient, stack_transform}; + +fn main() { + let trace = "ReferenceError: x is not defined\n at foo (https://example.com/app.js:10:5)"; + let frames = stack_transform::parse_stack_trace(trace); + let sm = br#"{\n \"version\":3,\n \"sources\":[\"src/a.js\"],\n \"sourcesContent\":[\"fn()\\n\"],\n \"names\":[],\n \"mappings\": \"AAAA\"\n}"#; + let client = SourceMapParserClient::new(sm).unwrap(); + if let Some(f) = frames.first() { if let Some(tok) = client.lookup_token(f.line, f.column) { println!("{:?} {} {}", tok.src, tok.line, tok.column); }} +} +``` + +## 主要 API + +| 分类 | API | 说明 | +| -------- | ------------------------------------------------ | ------------------------------ | +| 解析 | parse_stack_line / 
parse_stack_trace | 多引擎 JS 堆栈行/批量解析 | +| 错误堆栈 | ErrorStack::from_raw | 提取首行错误信息 + 帧集合 | +| 定位 | SourceMapParserClient::lookup_token | 编译后行列 -> 原始源码位置 | +| 上下文 | SourceMapParserClient::lookup_token_with_context | 同时返回上下文代码窗口 | +| 上下文 | SourceMapParserClient::lookup_context | 无需 token,只获取上下文片段 | +| 批量 | SourceMapParserClient::map_stack_trace | 多行堆栈文本批量映射 | +| 错误堆栈 | SourceMapParserClient::map_error_stack | 带错误首行整段映射,可选上下文 | +| 源码 | SourceMapParserClient::unpack_all_sources | 解包所有 sourcesContent | + +## 整段映射示例 + +```rust +use source_map_parser::SourceMapParserClient; +fn main() { + let sm = br#"{\n \"version\":3,\n \"sources\":[\"src/a.js\"],\n \"sourcesContent\":[\"fn1()\\nfn2()\\nfn3()\\n\"],\n \"names\":[],\n \"mappings\": \"AAAA\"\n}"#; + let client = SourceMapParserClient::new(sm).unwrap(); + let err = "ReferenceError: x is not defined\n at foo (https://example.com/app.js:1:0)"; + let mapped = client.map_error_stack(err, Some(1)); + println!("frames={} ctx_frames={}", mapped.frames.len(), mapped.frames_with_context.len()); +} +``` + +## 升级说明 + +旧 crate 名称 `js_stack_parser` 已被替换;核心 API 在新 crate 下保持一致,原始低层函数仍可在迁移期保留(如需)但建议使用 `SourceMapParserClient` 高层封装。 + +## 设计原则 + +- 纯计算:不做文件/网络 I/O +- 明确分层:解析 / 定位 / 上下文 分离 +- 稳定 API:Facade 封装便于多端 (WASM / Node) 绑定 + +## 计划 + +- LRU SourceMap 缓存 +- bench 性能基准 +- 失败行诊断信息增强 + +--- + +欢迎 issue / PR 进一步完善。 diff --git a/crates/source_map_parser/src/context_lookup.rs b/crates/source_map_parser/src/context_lookup.rs new file mode 100644 index 0000000..f2009e2 --- /dev/null +++ b/crates/source_map_parser/src/context_lookup.rs @@ -0,0 +1,81 @@ +use serde::Serialize; +use sourcemap::SourceMap; + +#[derive(Clone, Debug, Serialize)] +pub struct ContextLine { + pub line: u32, + pub is_target: bool, + pub code: String, +} +#[derive(Clone, Debug, Serialize)] +pub struct ContextSnippet { + pub src: String, + pub line: u32, + pub column: u32, + pub context: Vec, +} + +pub fn lookup_context_from_sourcemap( + sourcemap: &SourceMap, + compile_line: 
u32, + compile_column: u32, + context_lines: u32, +) -> Option { + if compile_line == 0 { + return None; + } + let token = sourcemap.lookup_token(compile_line - 1, compile_column)?; + let origin_line = token.get_src_line() as u32; + let origin_col = token.get_src_col() as u32; + let src = token + .get_source() + .map(|s| s.to_string()) + .unwrap_or_default(); + let view = token.get_source_view()?; + let source_text = view.source(); + let source_lines: Vec<&str> = source_text.lines().collect(); + let start = origin_line.saturating_sub(context_lines); + let end = origin_line + context_lines; + let mut context: Vec = Vec::new(); + for ln in start..=end { + let code = source_lines + .get(ln as usize) + .cloned() + .unwrap_or("") + .to_string(); + context.push(ContextLine { + line: ln, + is_target: ln == origin_line, + code, + }); + } + Some(ContextSnippet { + src, + line: origin_line, + column: origin_col, + context, + }) +} + +#[cfg(test)] +mod tests { + use super::*; + fn sm(content: &str) -> sourcemap::SourceMap { + // 需要对换行做 \n 转义以避免 JSON 原始控制字符 + let escaped = content.replace('\n', "\\n"); + let raw = format!("{{\"version\":3,\"file\":\"min.js\",\"sources\":[\"a.js\"],\"sourcesContent\":[\"{escaped}\"],\"names\":[],\"mappings\":\"AAAA\"}}"); + sourcemap::SourceMap::from_reader(raw.as_bytes()).unwrap() + } + #[test] + fn lookup_basic_context() { + let smap = sm("l0()\nl1()\nl2()\n"); + let snippet = lookup_context_from_sourcemap(&smap, 1, 0, 1).unwrap(); + assert!(snippet.context.len() >= 2); + assert!(snippet.context.iter().any(|c| c.is_target)); + } + #[test] + fn lookup_returns_none_when_line_zero() { + let smap = sm("a()\n"); + assert!(lookup_context_from_sourcemap(&smap, 0, 0, 1).is_none()); + } +} diff --git a/crates/source_map_parser/src/lib.rs b/crates/source_map_parser/src/lib.rs new file mode 100644 index 0000000..2eb3370 --- /dev/null +++ b/crates/source_map_parser/src/lib.rs @@ -0,0 +1,195 @@ +/// 通用位置 -> 上下文代码片段 +pub mod context_lookup; +/// 解包 source 
map 内容 +pub mod sourcemap_unpacker; +/// 解析堆栈信息内容, 转换为 [`stack_transform::ErrorStack`] 结构体 +pub mod stack_transform; +/// 生成 source map token +pub mod token_generator; + +use sourcemap::SourceMap; +use std::collections::HashMap; + +use context_lookup::{lookup_context_from_sourcemap, ContextSnippet}; +use token_generator::{ + generate_context_token_from_map, generate_source_map_token_from_map, SourceMapToken, Token, +}; + +/// 核心门面: 绑定一个 SourceMap 提供高层 API +pub struct SourceMapParserClient { + sourcemap: SourceMap, +} + +#[derive(thiserror::Error, Debug)] +pub enum ClientError { + #[error("invalid sourcemap: {0}")] + InvalidSourceMap(String), +} + +impl SourceMapParserClient { + /// 通过 source map 原始字节创建客户端 + pub fn new(sourcemap_content: &[u8]) -> Result { + let sm = SourceMap::from_slice(sourcemap_content) + .map_err(|e| ClientError::InvalidSourceMap(e.to_string()))?; + Ok(Self { sourcemap: sm }) + } + + /// 查找原始 token (1-based 行) + pub fn lookup_token(&self, line: u32, column: u32) -> Option { + generate_source_map_token_from_map(&self.sourcemap, line, column) + } + + /// 解包所有源码 + pub fn unpack_all_sources(&self) -> HashMap { + crate::sourcemap_unpacker::unpack_sources(&self.sourcemap) + } + + /// 带上下文源码 (context 行向前向后扩展) 获取 Token + pub fn lookup_token_with_context( + &self, + line: u32, + column: u32, + context_lines: u32, + ) -> Option { + generate_context_token_from_map(&self.sourcemap, line, column, context_lines) + } + + /// 通用能力:传入编译后行/列 + 上下文行数,返回原始源码上下文片段 (适用于非错误堆栈场景) + pub fn lookup_context( + &self, + line: u32, + column: u32, + context_lines: u32, + ) -> Option { + lookup_context_from_sourcemap(&self.sourcemap, line, column, context_lines) + } + + /// 便捷:单行堆栈映射 (解析+还原) - 无上下文 + pub fn map_stack_line(&self, stack_line: &str) -> Option { + if let Some(stack) = crate::stack_transform::parse_stack_line(stack_line) { + self.lookup_token(stack.line, stack.column) + } else { + None + } + } + + /// 便捷:单行堆栈映射 (带上下文) -> Token 结构 (包含多行) + pub fn 
map_stack_line_with_context(&self, stack_line: &str, context_lines: u32) -> Option { + if let Some(stack) = crate::stack_transform::parse_stack_line(stack_line) { + self.lookup_token_with_context(stack.line, stack.column, context_lines) + } else { + None + } + } + + /// 便捷:多行堆栈 (只传堆栈文本块, 不含首行错误信息) 逐行映射 + pub fn map_stack_trace(&self, trace: &str) -> Vec { + crate::stack_transform::parse_stack_trace(trace) + .into_iter() + .filter_map(|s| self.lookup_token(s.line, s.column)) + .collect() + } + + /// 便捷:错误堆栈 (包含首行错误消息) + 上下文,可选 context_lines + pub fn map_error_stack( + &self, + error_stack_raw: &str, + context_lines: Option, + ) -> MappedErrorStack { + let es = crate::stack_transform::ErrorStack::from_raw(error_stack_raw); + let mut frames_simple = Vec::new(); + let mut frames_with_context = Vec::new(); + for st in &es.stacks { + if let Some(cl) = context_lines { + if let Some(tok) = self.lookup_token_with_context(st.line, st.column, cl) { + frames_with_context.push(tok); + } + } else if let Some(tok) = self.lookup_token(st.line, st.column) { + frames_simple.push(tok); + } + } + MappedErrorStack { + error_message: es.error_message, + frames: frames_simple, + frames_with_context, + } + } +} + +/// 错误堆栈批量映射结果 +#[derive(Debug, Clone, serde::Serialize)] +pub struct MappedErrorStack { + pub error_message: String, + pub frames: Vec, + pub frames_with_context: Vec, +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::stack_transform::parse_stack_trace; + + #[test] + fn test_client_lookup() { + let sm = br#"{ + "version":3, + "file":"min.js", + "sources":["src/a.js"], + "sourcesContent":["function add(a,b){\n return a+b;\n}\n"], + "names":["add","a","b"], + "mappings":"AAAA,SAASA,IAAI,CAACC,CAAC,EAAEC,CAAC,EAAE;EACrB,OAAOD,CAAC,GAAGC,CAAC;AACjB"}"#; + let client = SourceMapParserClient::new(sm).expect("create client"); + let token = client.lookup_token(1, 0).expect("token"); + assert!(token.src.unwrap().ends_with("src/a.js")); + } + + #[test] + fn 
test_client_lookup_with_context() { + let sm = br#"{ + "version":3, + "file":"min.js", + "sources":["src/a.js"], + "sourcesContent":["line0()\nline1()\nline2()\nline3()\n"], + "names":[], + "mappings":"AAAA"}"#; + let client = SourceMapParserClient::new(sm).unwrap(); + let tok = client.lookup_token_with_context(1, 0, 1).unwrap(); + assert!(tok.source_code.len() >= 2); + } + + #[test] + fn test_client_generic_context() { + let sm = br#"{ + "version":3, + "file":"min.js", + "sources":["src/a.js"], + "sourcesContent":["a()\nb()\nc()\nd()\n"], + "names":[], + "mappings":"AAAA"}"#; + let client = SourceMapParserClient::new(sm).unwrap(); + let snippet = client.lookup_context(1, 0, 2).unwrap(); + assert!(snippet.context.len() >= 3); + } + + #[test] + fn test_client_map_stack_line() { + let sm = br#"{ + "version":3, + "file":"min.js", + "sources":["src/a.js"], + "sourcesContent":["fn()\n"], + "names":[], + "mappings":"AAAA" + }"#; + let client = SourceMapParserClient::new(sm).unwrap(); + let line = "at foo (https://example.com/min.js:1:0)"; + let _ = client.map_stack_line(line); + } + + #[test] + fn test_parse_stack_trace_multi() { + let trace = "at foo (https://example.com/app.js:10:5)\n@https://example.com/app.js:20:15"; + let stacks = parse_stack_trace(trace); + assert_eq!(stacks.len(), 2); + } +} diff --git a/crates/source_map_parser/src/sourcemap_unpacker.rs b/crates/source_map_parser/src/sourcemap_unpacker.rs new file mode 100644 index 0000000..9adc3b8 --- /dev/null +++ b/crates/source_map_parser/src/sourcemap_unpacker.rs @@ -0,0 +1,12 @@ +use sourcemap::SourceMap; +use std::collections::HashMap; + +pub fn unpack_sources(sm: &SourceMap) -> HashMap { + let mut map = HashMap::new(); + for (idx, source) in sm.sources().enumerate() { + if let Some(view) = sm.get_source_view(idx as u32) { + map.insert(source.to_string(), view.source().to_string()); + } + } + map +} diff --git a/crates/source_map_parser/src/stack_transform.rs 
b/crates/source_map_parser/src/stack_transform.rs new file mode 100644 index 0000000..84ed67b --- /dev/null +++ b/crates/source_map_parser/src/stack_transform.rs @@ -0,0 +1,161 @@ +use once_cell::sync::Lazy; +use regex::{Regex, RegexSet}; +use serde::Serialize; + +static STACK_LINE_PRIMARY: Lazy = + Lazy::new(|| RegexSet::new(&[r"^at ", r"@.+:\d+:\d+$"]).unwrap()); + +static STACK_LINE_PATTERNS: Lazy> = Lazy::new(|| { + vec![ + Regex::new(r"^at\s+(?P.+?)\s*\((?P.+?):(?P\d+):(?P\d+)\)$").unwrap(), + Regex::new(r"^at\s+(?P.+?):(?P\d+):(?P\d+)$").unwrap(), + Regex::new(r"^(?:async\s+)?(?P[^@]+?)@(?P.+?):(?P\d+):(?P\d+)$") + .unwrap(), + Regex::new(r"^@(?P.+?):(?P\d+):(?P\d+)$").unwrap(), + ] +}); + +static STACK_LINE_FALLBACK: Lazy = Lazy::new(|| { + Regex::new(r"at\s+(?P.+?)?\s*\((?P.+?):(?P\d+):(?P\d+)\)|at\s+(?P.+?):(?P\d+):(?P\d+)").unwrap() +}); + +#[derive(Clone, Debug, Serialize)] +pub struct Stack<'a> { + pub name: &'a str, + pub line: u32, + pub column: u32, + pub source_file: &'a str, + pub original_raw: &'a str, +} + +pub fn parse_stack_line(original_raw: &str) -> Option { + let trimmed = original_raw.trim(); + if trimmed.matches(':').count() < 2 { + return None; + } + if STACK_LINE_PRIMARY.is_match(trimmed) { + for re in STACK_LINE_PATTERNS.iter() { + if let Some(caps) = re.captures(trimmed) { + let name = caps.name("name").map(|m| m.as_str()).unwrap_or(""); + let file = caps.name("url").map(|m| m.as_str()).unwrap_or(""); + let line = caps + .name("line") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + let column = caps + .name("column") + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + return Some(Stack { + name, + line, + column, + source_file: file, + original_raw: trimmed, + }); + } + } + } + if let Some(captures) = STACK_LINE_FALLBACK.captures(trimmed) { + let name = captures.name("name").map(|m| m.as_str()).unwrap_or(""); + let url = captures.name("url"); + let url2 = captures.name("url2"); + let file = url.or(url2).map(|m| 
m.as_str()).unwrap_or(""); + let line = captures + .name("line") + .or(captures.name("line2")) + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + let column = captures + .name("column") + .or(captures.name("column2")) + .and_then(|m| m.as_str().parse::().ok()) + .unwrap_or(0); + return Some(Stack { + name, + line, + column, + source_file: file, + original_raw: trimmed, + }); + } + None +} + +pub fn parse_stack_trace(trace_string: &str) -> Vec { + trace_string + .lines() + .filter_map(|l| parse_stack_line(l.trim())) + .collect() +} + +#[derive(Debug, Serialize)] +pub struct ErrorStack<'a> { + pub error_raw: &'a str, + pub stacks: Vec>, + pub error_message: String, +} + +impl ErrorStack<'_> { + pub fn from_raw(error_raw: &str) -> ErrorStack { + let mut stacks: Vec = Vec::new(); + let mut error_message = String::new(); + for (index, line) in error_raw.lines().enumerate() { + if index == 0 { + error_message = line.to_string(); + } else if let Some(stack) = parse_stack_line(line.trim()) { + stacks.push(stack); + } + } + ErrorStack { + error_raw, + stacks, + error_message, + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn parse_various_stack_lines() { + let lines = vec![ + "at foo (https://example.com/app.js:10:5)", + "at https://example.com/app.js:20:15", + "async bar@https://example.com/app.js:30:25", + "@https://example.com/app.js:40:35", + ]; + let stacks: Vec<_> = lines.iter().filter_map(|l| parse_stack_line(l)).collect(); + assert_eq!(stacks.len(), lines.len()); + assert_eq!(stacks[0].line, 10); + assert_eq!(stacks[1].column, 15); + // The async prefix is ignored by the regex, only the method name is kept + assert_eq!(stacks[2].name, "bar"); + assert_eq!(stacks[3].line, 40); + } + + #[test] + fn parse_stack_trace_mixed_and_invalid() { + let trace = "at foo (https://a/x.js:1:1)\nINVALID LINE\n@https://a/x.js:2:5"; + let stacks = parse_stack_trace(trace); + assert_eq!(stacks.len(), 2); + assert_eq!(stacks[1].column, 5); + } + + 
#[test] + fn parse_error_stack_extract_message() { + let raw = "ReferenceError: x is not defined\n at foo (https://a/app.js:3:7)"; + let es = ErrorStack::from_raw(raw); + assert_eq!(es.error_message, "ReferenceError: x is not defined"); + assert_eq!(es.stacks.len(), 1); + assert_eq!(es.stacks[0].line, 3); + } + + #[test] + fn parse_stack_line_rejects_non_stack() { + assert!(parse_stack_line("Just a message without coords").is_none()); + assert!(parse_stack_line("Error: something").is_none()); + } +} diff --git a/crates/source_map_parser/src/token_generator.rs b/crates/source_map_parser/src/token_generator.rs new file mode 100644 index 0000000..070ea91 --- /dev/null +++ b/crates/source_map_parser/src/token_generator.rs @@ -0,0 +1,277 @@ +use serde::Serialize; +use sourcemap::SourceMap; + +#[derive(Serialize, Clone, Debug)] +pub struct SourceMapToken { + pub line: u32, + pub column: u32, + pub source_code: Option, + pub src: Option, +} + +pub fn generate_source_map_token( + source_map_content: &str, + line: u32, + column: u32, +) -> Option { + let source_map = SourceMap::from_reader(source_map_content.as_bytes()).ok()?; + generate_source_map_token_from_map(&source_map, line, column) +} + +pub fn generate_source_map_token_from_map( + source_map: &SourceMap, + line: u32, + column: u32, +) -> Option { + if line == 0 { + return None; + } + if let Some(token) = source_map.lookup_token(line - 1, column) { + Some(SourceMapToken { + line: token.get_src_line() as u32, + column: token.get_src_col() as u32, + source_code: token.get_source_view().map(|v| v.source().to_string()), + src: token.get_source().map(|s| s.to_string()), + }) + } else { + None + } +} + +pub fn get_stack_source( + source_map_content: &str, + line: u32, + column: u32, + offset_line: Option, +) -> Option { + let source_token = generate_source_map_token(source_map_content, line, column)?; + let mut token = Token { + line: source_token.line, + column: source_token.column, + source_code: Vec::new(), + src: 
source_token.src.clone().unwrap_or_default(), + }; + if let Some(source_code_text) = source_token.source_code { + if let Some(offset) = offset_line { + let end_line = source_token.line + offset; + let start_line = if source_token.line < offset { + 0 + } else { + source_token.line - offset + }; + for line_number in start_line..end_line { + let is_stack_line = line_number == source_token.line; + let raw = source_code_text + .lines() + .nth(line_number as usize) + .unwrap_or("") + .to_string(); + token.source_code.push(SourceCode { + line: line_number, + raw, + is_stack_line, + }); + } + } else { + token.source_code.push(SourceCode { + line: source_token.line, + raw: source_code_text + .lines() + .nth(source_token.line as usize) + .unwrap_or("") + .to_string(), + is_stack_line: true, + }); + } + } + Some(token) +} + +#[derive(Clone, Debug)] +pub struct GenerateTask { + pub source_map_content: String, + pub line: u32, + pub column: u32, + pub source_line_offset: Option, +} + +#[derive(Clone, Debug, Serialize)] +pub struct SourceCode { + pub line: u32, + pub is_stack_line: bool, + pub raw: String, +} + +#[derive(Clone, Debug, Serialize)] +pub struct Token { + pub line: u32, + pub column: u32, + pub source_code: Vec, + pub src: String, +} + +pub struct GenerateToken { + tokens: Vec, + tasks: Vec, +} +impl GenerateToken { + pub fn new() -> Self { + GenerateToken { + tokens: Vec::new(), + tasks: Vec::new(), + } + } + pub fn add_task(&mut self, task: GenerateTask) { + self.tasks.push(task); + } + pub fn generate(&mut self) { + for task in &self.tasks { + if let Some(source_token) = + generate_source_map_token(&task.source_map_content, task.line, task.column) + { + let mut token = Token { + line: source_token.line, + column: source_token.column, + source_code: Vec::new(), + src: source_token.src.clone().unwrap_or_default(), + }; + if let Some(source_code_text) = source_token.source_code { + match task.source_line_offset { + Some(offset) => { + let end_line = 
source_token.line + offset; + let start_line = if source_token.line < offset { + 0 + } else { + source_token.line - offset + }; + for line_number in start_line..end_line { + let is_stack_line = line_number == source_token.line; + let raw = source_code_text + .lines() + .nth(line_number as usize) + .unwrap_or("") + .to_string(); + token.source_code.push(SourceCode { + line: line_number, + raw, + is_stack_line, + }); + } + } + None => { + token.source_code.push(SourceCode { + line: source_token.line, + raw: source_code_text + .lines() + .nth(source_token.line as usize) + .unwrap_or("") + .to_string(), + is_stack_line: true, + }); + } + } + self.tokens.push(token); + } + } + } + } + pub fn get_tokens(&self) -> Vec { + self.tokens.clone() + } +} + +pub fn generate_context_token_from_map( + sm: &SourceMap, + line: u32, + column: u32, + context_lines: u32, +) -> Option { + if line == 0 { + return None; + } + let sm_token = sm.lookup_token(line - 1, column)?; + let origin_line = sm_token.get_src_line() as u32; + let origin_col = sm_token.get_src_col() as u32; + let src_path = sm_token + .get_source() + .map(|s| s.to_string()) + .unwrap_or_default(); + let mut token = Token { + line: origin_line, + column: origin_col, + source_code: Vec::new(), + src: src_path, + }; + if let Some(view) = sm_token.get_source_view() { + let source_text = view.source(); + let lines: Vec<&str> = source_text.lines().collect(); + let start = origin_line.saturating_sub(context_lines); + let end = origin_line + context_lines; + for ln in start..=end { + let raw = lines.get(ln as usize).cloned().unwrap_or("").to_string(); + token.source_code.push(SourceCode { + line: ln, + is_stack_line: ln == origin_line, + raw, + }); + } + return Some(token); + } + None +} + +#[cfg(test)] +mod tests { + use super::*; + + fn simple_sm(src: &str, content: &str) -> String { + format!( + r#"{{"version":3,"file":"min.js","sources":["{src}"],"sourcesContent":["{content}"],"names":[],"mappings":"AAAA"}}"# + ) + } + + 
#[test] + fn test_get_stack_source_single_line() { + let sm = simple_sm("a.js", "fn()\\n"); + let tok = get_stack_source(&sm, 1, 0, None).expect("token"); + assert_eq!(tok.line, 0); // original line + assert_eq!(tok.source_code.len(), 1); + } + + #[test] + fn test_get_stack_source_with_offset() { + let sm = simple_sm("a.js", "l0()\\nl1()\\nl2()\\n"); + let tok = get_stack_source(&sm, 1, 0, Some(1)).expect("token"); + // offset=1 应至少包含 1 行(目标) + 1 行上/下文(如果存在) + assert!(tok.source_code.len() >= 1); + } + + #[test] + fn test_generate_token_batch() { + let sm = simple_sm("a.js", "l0()\\nl1()\\n"); + let mut gen = GenerateToken::new(); + gen.add_task(GenerateTask { + source_map_content: sm.clone(), + line: 1, + column: 0, + source_line_offset: None, + }); + gen.add_task(GenerateTask { + source_map_content: sm, + line: 1, + column: 0, + source_line_offset: Some(1), + }); + gen.generate(); + let tokens = gen.get_tokens(); + assert_eq!(tokens.len(), 2); + } + + #[test] + fn test_generate_context_token_from_map() { + let sm_raw = simple_sm("a.js", "l0()\\nl1()\\nl2()\\n"); + let sm = SourceMap::from_reader(sm_raw.as_bytes()).unwrap(); + let tok = generate_context_token_from_map(&sm, 1, 0, 1).unwrap(); + assert!(tok.source_code.len() >= 2); + } +} diff --git a/crates/source_map_parser/tests/integration.rs b/crates/source_map_parser/tests/integration.rs new file mode 100644 index 0000000..e1800bb --- /dev/null +++ b/crates/source_map_parser/tests/integration.rs @@ -0,0 +1,73 @@ +use source_map_parser::SourceMapParserClient; + +fn make_sm(sources: &[(&str, &str)]) -> String { + // Single mapping 'AAAA' points to first source first line column 0 + // We'll replicate per source by reusing minimal mapping for simplicity. + // For multi-source, we still can test unpack_all_sources ordering/length. 
+ let mut src_names = Vec::new(); + let mut contents = Vec::new(); + for (name, content) in sources { + src_names.push(format!("\"{name}\"")); + let esc = content.replace('\n', "\\n"); + contents.push(format!("\"{esc}\"")); + } + format!( + "{{\"version\":3,\"file\":\"min.js\",\"sources\":[{}],\"sourcesContent\":[{}],\"names\":[],\"mappings\":\"AAAA\"}}", + src_names.join(","), + contents.join(",") + ) +} + +#[test] +fn map_error_stack_with_context() { + let sm = make_sm(&[("a.js", "l0()\nl1()\nl2()\n")]); + let client = SourceMapParserClient::new(sm.as_bytes()).unwrap(); + let raw = "ReferenceError: x\n at foo (https://example.com/min.js:1:0)"; // maps to origin line 0 + let mapped = client.map_error_stack(raw, Some(1)); + assert_eq!(mapped.error_message, "ReferenceError: x"); + assert!(mapped.frames_with_context.len() >= 1); + let ctx = &mapped.frames_with_context[0]; + assert!(ctx.source_code.len() >= 2); // context lines +} + +#[test] +fn unpack_all_sources_multi() { + let sm = make_sm(&[("a.js", "a()\n"), ("b.js", "b1()\nb2()\n")]); + let client = SourceMapParserClient::new(sm.as_bytes()).unwrap(); + let sources = client.unpack_all_sources(); + assert_eq!(sources.len(), 2); + assert!(sources.get("a.js").unwrap().contains("a()")); +} + +#[test] +fn invalid_source_map_returns_error() { + let bad = b"{ not a valid json"; + let err = SourceMapParserClient::new(bad).err().expect("err"); + // Debug 格式包含 InvalidSourceMap 枚举名称 + let dbg = format!("{err:?}"); + assert!(dbg.contains("InvalidSourceMap")); +} + +#[test] +fn lookup_out_of_range_returns_none() { + let sm = make_sm(&[("a.js", "only()\n")]); + let client = SourceMapParserClient::new(sm.as_bytes()).unwrap(); + // sourcemap 库会按“最近匹配”策略回退,因此超大行通常返回最后一个已知映射而非 None + let hi = client.lookup_token(100, 0).expect("fallback token"); + assert_eq!(hi.line, 0); + // 列超大也应安全 (列 > 长度 仍能 lookup 但通常返回同一行列 0 或 None) + let maybe = client.lookup_token(1, 9999); + // 行有效时允许 Some 或 None, 这里只断言不会 panic 并保持返回结构一致 + if let Some(tok) 
= maybe { + assert_eq!(tok.line, 0); + } +} + +#[test] +fn context_window_edge_at_start() { + let sm = make_sm(&[("a.js", "l0()\nl1()\nl2()\n")]); + let client = SourceMapParserClient::new(sm.as_bytes()).unwrap(); + let tok = client.lookup_token_with_context(1, 0, 5).unwrap(); + // start 行不足 context 也不会 panic,长度 >= 原行 (1) + min(请求, 实际前后存在) + assert!(tok.source_code.len() >= 1); +} diff --git a/scripts/build-wasm-node.sh b/scripts/build-wasm-node.sh new file mode 100644 index 0000000..927e293 --- /dev/null +++ b/scripts/build-wasm-node.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd) +CRATE_DIR="$ROOT_DIR/crates/node_sdk" + +if ! command -v wasm-pack >/dev/null 2>&1; then + echo "Error: wasm-pack not found. Install via: cargo install wasm-pack or official installer" >&2 + exit 1 +fi + +pushd "$CRATE_DIR" >/dev/null +wasm-pack build --target nodejs --release "$@" +popd >/dev/null + +printf "\nDone. Output at crates/node_sdk/pkg\n" diff --git a/scripts/generate-changelog.sh b/scripts/generate-changelog.sh new file mode 100644 index 0000000..1668eaf --- /dev/null +++ b/scripts/generate-changelog.sh @@ -0,0 +1,124 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Usage: scripts/generate-changelog.sh [repo_url] +# Reads git history since last tag, parses Conventional Commits, groups by type & scope, +# detects BREAKING CHANGES, generates compare links for GitHub/GitLab. + +NEW_VERSION="$1" +REPO_URL="${2:-}" # e.g. 
https://github.com/OWNER/REPO or https://gitlab.com/group/project +DATE=$(date +%Y-%m-%d) +CHANGELOG_FILE=CHANGELOG.md + +LAST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "") +if [ -z "$LAST_TAG" ]; then + RANGE="" +else + RANGE="${LAST_TAG}..HEAD" +fi + +echo "Generating changelog for version $NEW_VERSION (since $LAST_TAG)" >&2 + +# Collect commits +COMMITS=$(git log --pretty=format:'%s||%h||%b' $RANGE) + +declare -A sections +sections[feat]="" +sections[fix]="" +sections[perf]="" +sections[refactor]="" +sections[docs]="" +sections[test]="" +sections[chore]="" +sections[build]="" +sections[ci]="" +sections[style]="" +sections[other]="" +breaking_section="" + +while IFS= read -r line; do + [ -z "$line" ] && continue + subject_part=${line%%||*} + rest=${line#*||} + hash=${rest%%||*} + body=${rest#*||} + + # Conventional Commit regex: type(scope)!: description + if [[ $subject_part =~ ^([a-zA-Z]+)(\(([a-zA-Z0-9_-]+)\))?(!)?:[[:space:]](.+) ]]; then + type=${BASH_REMATCH[1],,} + scope=${BASH_REMATCH[3]:-} + bang=${BASH_REMATCH[4]:-} + desc=${BASH_REMATCH[5]} + else + type="other" + scope="" + desc="$subject_part" + bang="" + fi + + short_ref=$hash + if [ -n "$REPO_URL" ]; then + # Normalize repo URL (remove .git suffix if present) + repo_clean=${REPO_URL%.git} + if [[ $repo_clean == *github.com* || $repo_clean == *gitlab.com* ]]; then + short_ref="[$hash]($repo_clean/commit/$hash)" + fi + fi + + scope_prefix="" + [ -n "$scope" ] && scope_prefix="**$scope:** " + formatted="- ${scope_prefix}${desc} (${short_ref})" + + # Detect breaking in subject or body + if [ -n "$bang" ] || grep -qi '^BREAKING CHANGE' <<< "$body"; then + breaking_section+="$formatted\n" + fi + + # Append to section + if [[ -z ${sections[$type]+_} ]]; then + sections[other]+="$formatted\n" + else + sections[$type]+="$formatted\n" + fi +done <<< "$COMMITS" + +build_section() { + local title="$1"; shift + local body="$1"; shift || true + if [ -n "$body" ]; then + echo "### $title" + echo -e 
"$body" + echo + fi +} + +TMP_FILE=$(mktemp) +{ + echo "## v$NEW_VERSION - $DATE" + if [ -n "$REPO_URL" ] && [ -n "$LAST_TAG" ]; then + repo_clean=${REPO_URL%.git} + if [[ $repo_clean == *github.com* || $repo_clean == *gitlab.com* ]]; then + echo "[Compare changes]($repo_clean/compare/$LAST_TAG...v$NEW_VERSION)" + echo + fi + fi + build_section "Breaking Changes" "$breaking_section" + build_section "Features" "${sections[feat]}" + build_section "Fixes" "${sections[fix]}" + build_section "Performance" "${sections[perf]}" + build_section "Refactors" "${sections[refactor]}" + build_section "Docs" "${sections[docs]}" + build_section "Tests" "${sections[test]}" + build_section "Build" "${sections[build]}" + build_section "CI" "${sections[ci]}" + build_section "Style" "${sections[style]}" + build_section "Chore" "${sections[chore]}" + build_section "Other" "${sections[other]}" +} > "$TMP_FILE" + +if [ -f "$CHANGELOG_FILE" ]; then + cat "$CHANGELOG_FILE" >> "$TMP_FILE" +fi +mv "$TMP_FILE" "$CHANGELOG_FILE" + +echo "Changelog updated: $CHANGELOG_FILE" >&2