diff --git a/stream/.vscode/settings.json b/stream/.vscode/settings.json new file mode 100644 index 0000000..9fda356 --- /dev/null +++ b/stream/.vscode/settings.json @@ -0,0 +1,38 @@ +{ + "editor.formatOnSave": true, + "editor.codeActionsOnSave": { + "source.organizeImports.biome": "explicit", + "source.fixAll.biome": "explicit", + "tailwindCSS.sortClasses": "explicit" + }, + "typescript.preferences.importModuleSpecifier": "non-relative", + "javascript.preferences.importModuleSpecifier": "non-relative", + "tailwindCSS.classAttributes": ["class", "className", "ngClass"], + "tailwindCSS.includeLanguages": { + "typescript": "javascript", + "typescriptreact": "javascript" + }, + "editor.defaultFormatter": "biomejs.biome", + "[javascript]": { + "editor.defaultFormatter": "biomejs.biome" + }, + "[typescript]": { + "editor.defaultFormatter": "biomejs.biome", + "editor.formatOnSave": true + }, + "[javascriptreact]": { + "editor.defaultFormatter": "biomejs.biome" + }, + "[typescriptreact]": { + "editor.defaultFormatter": "biomejs.biome", + "editor.formatOnSave": true + }, + "[json]": { + "editor.defaultFormatter": "biomejs.biome", + "editor.formatOnSave": true + }, + "[jsonc]": { + "editor.defaultFormatter": "biomejs.biome", + "editor.formatOnSave": true + } +} diff --git a/stream/package.json b/stream/package.json index e585e94..645deea 100644 --- a/stream/package.json +++ b/stream/package.json @@ -58,6 +58,7 @@ "react": "^19.1.1", "react-day-picker": "^9.11.1", "react-dom": "^19.1.1", + "react-markdown": "^10.1.0", "react-virtuoso": "^4.14.0", "remark": "^15.0.1", "remark-gfm": "^4.0.1", diff --git a/stream/pnpm-lock.yaml b/stream/pnpm-lock.yaml index f2a742f..683db1f 100644 --- a/stream/pnpm-lock.yaml +++ b/stream/pnpm-lock.yaml @@ -134,6 +134,9 @@ importers: react-dom: specifier: ^19.1.1 version: 19.2.0(react@19.2.0) + react-markdown: + specifier: ^10.1.0 + version: 10.1.0(@types/react@19.2.2)(react@19.2.0) react-virtuoso: specifier: ^4.14.0 version: 
4.14.1(react-dom@19.2.0(react@19.2.0))(react@19.2.0) @@ -1676,9 +1679,15 @@ packages: '@types/debug@4.1.12': resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} + '@types/estree-jsx@1.0.5': + resolution: {integrity: sha512-52CcUVNFyfb1A2ALocQw/Dd1BQFNmSdkuC3BkZ6iqhdMfQz7JWOFRuJFloOzjk+6WijU56m9oKXFAXc7o3Towg==} + '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} + '@types/hast@3.0.4': + resolution: {integrity: sha512-WPs+bbQw5aCj+x6laNGWLH3wviHtoCv/P3+otBhbOhJgG8qtpdAMlTCxLtsTWA7LH1Oh/bFCHsBn0TPS5m30EQ==} + '@types/linkify-it@3.0.5': resolution: {integrity: sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==} @@ -1720,12 +1729,18 @@ packages: '@types/react@19.2.2': resolution: {integrity: sha512-6mDvHUFSjyT2B2yeNx2nUgMxh9LtOWvkhIU3uePn2I2oyNymUAX1NIsdgviM4CH+JSrp2D2hsMvJOkxY+0wNRA==} + '@types/unist@2.0.11': + resolution: {integrity: sha512-CmBKiL6NNo/OqgmMn95Fk9Whlp2mtvIv+KNpQKN2F4SjvrEesubTRWGYSg+BnWZOnlCaSTU1sMpsBOzgbYhnsA==} + '@types/unist@3.0.3': resolution: {integrity: sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==} '@types/use-sync-external-store@0.0.6': resolution: {integrity: sha512-zFDAD+tlpf2r4asuHEj0XH6pY6i0g5NeAHPn+15wk3BV6JA69eERFXC1gyGThDkVa1zCyKr5jox1+2LbV/AMLg==} + '@ungap/structured-clone@1.3.0': + resolution: {integrity: sha512-WmoN8qaIAo7WTYWbAZuG8PYEhn5fkz7dZrqTBZ7dtt//lL2Gwms1IcnQ5yHqjDfX8Ft5j4YzDM23f87zBfDe9g==} + '@vercel/oidc@3.0.3': resolution: {integrity: sha512-yNEQvPcVrK9sIe637+I0jD6leluPxzwJKx/Haw6F4H77CdDsszUn5V3o96LPziXkSNE2B83+Z3mjqGKBK/R6Gg==} engines: {node: '>= 20'} @@ -1777,9 +1792,18 @@ packages: ccount@2.0.1: resolution: {integrity: sha512-eyrF0jiFpY+3drT6383f1qhkbGsLSifNAjA61IUjZjmLCWjItY6LB9ft9YhoDgwfmclB2zhu51Lc7+95b8NRAg==} + character-entities-html4@2.1.0: + resolution: 
{integrity: sha512-1v7fgQRj6hnSwFpq1Eu0ynr/CDEw0rXo2B61qXrLNdHZmPKgb7fqS1a2JwF0rISo9q77jDI8VMEHoApn8qDoZA==} + + character-entities-legacy@3.0.0: + resolution: {integrity: sha512-RpPp0asT/6ufRm//AJVwpViZbGM/MkjQFxJccQRHmISF/22NBtsHqAWmL+/pmkPWoIUJdWyeVleTl1wydHATVQ==} + character-entities@2.0.2: resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + character-reference-invalid@2.0.1: + resolution: {integrity: sha512-iBZ4F4wRbyORVsu0jPV7gXkOsGYjGHPmAyv+HiHG8gi5PtC9KI2j1+v8/tlibRvjoWX027ypmG/n0HtO5t7unw==} + chownr@3.0.0: resolution: {integrity: sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g==} engines: {node: '>=18'} @@ -1800,6 +1824,9 @@ packages: react: ^18 || ^19 || ^19.0.0-rc react-dom: ^18 || ^19 || ^19.0.0-rc + comma-separated-tokens@2.0.3: + resolution: {integrity: sha512-Fu4hJdvzeylCfQPp9SGWidpzrMs7tTrlu6Vb8XGaRGck8QSNZJJp538Wrb60Lax4fPwR64ViY468OIUTbRlGZg==} + convert-source-map@2.0.0: resolution: {integrity: sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==} @@ -1874,6 +1901,9 @@ packages: resolution: {integrity: sha512-/veY75JbMK4j1yjvuUxuVsiS/hr/4iHs9FTT6cgTexxdE0Ly/glccBAkloH/DofkjRbZU3bnoj38mOmhkZ0lHw==} engines: {node: '>=12'} + estree-util-is-identifier-name@3.0.0: + resolution: {integrity: sha512-hFtqIDZTIUZ9BXLb8y4pYGyk6+wekIivNVTcmvk8NoOh+VeRn5y6cEHzbURrWbfp1fIqdVipilzj+lfaadNZmg==} + eventsource-parser@3.0.6: resolution: {integrity: sha512-Vo1ab+QXPzZ4tCa8SwIHJFaSzy4R6SHf7BY79rFBDf0idraZWAkYrDjDj8uWaSm3S2TK+hJ7/t1CEmZ7jXw+pg==} engines: {node: '>=18.0.0'} @@ -1912,6 +1942,30 @@ packages: graceful-fs@4.2.11: resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} + hast-util-to-jsx-runtime@2.3.6: + resolution: {integrity: sha512-zl6s8LwNyo1P9uw+XJGvZtdFF1GdAkOg8ujOw+4Pyb76874fLps4ueHXDhXWdk6YHQ6OgUtinliG7RsYvCbbBg==} + + 
hast-util-whitespace@3.0.0: + resolution: {integrity: sha512-88JUN06ipLwsnv+dVn+OIYOvAuvBMy/Qoi6O7mQHxdPXpjy+Cd6xRkWwux7DKO+4sYILtLBRIKgsdpS2gQc7qw==} + + html-url-attributes@3.0.1: + resolution: {integrity: sha512-ol6UPyBWqsrO6EJySPz2O7ZSr856WDrEzM5zMqp+FJJLGMW35cLYmmZnl0vztAZxRUoNZJFTCohfjuIJ8I4QBQ==} + + inline-style-parser@0.2.7: + resolution: {integrity: sha512-Nb2ctOyNR8DqQoR0OwRG95uNWIC0C1lCgf5Naz5H6Ji72KZ8OcFZLz2P5sNgwlyoJ8Yif11oMuYs5pBQa86csA==} + + is-alphabetical@2.0.1: + resolution: {integrity: sha512-FWyyY60MeTNyeSRpkM2Iry0G9hpr7/9kD40mD/cGQEuilcZYS4okz8SN2Q6rLCJ8gbCt6fN+rC+6tMGS99LaxQ==} + + is-alphanumerical@2.0.1: + resolution: {integrity: sha512-hmbYhX/9MUMF5uh7tOXyK/n0ZvWpad5caBA17GsC6vyuCqaWliRG5K1qS9inmUhEMaOBIW7/whAnSwveW/LtZw==} + + is-decimal@2.0.1: + resolution: {integrity: sha512-AAB9hiomQs5DXWcRB1rqsxGUstbRroFOPPVAomNk/3XHR5JyEZChOyTWe2oayKnsSsr/kcGqF+z6yuH6HHpN0A==} + + is-hexadecimal@2.0.1: + resolution: {integrity: sha512-DgZQp241c8oO6cA1SbTEWiXeoxV42vlcJxgH+B3hi1AiqqKruZR3ZGF8In3fj4+/y/7rHvlOZLZtgJ/4ttYGZg==} + is-plain-obj@4.1.0: resolution: {integrity: sha512-+Pgi+vMuUNkJyExiMBt5IlFoMyKnr5zhJ4Uspz58WOhBF5QoIZkFyNHIbBAtHwzVAgk5RtndVNsDRN61/mmDqg==} engines: {node: '>=12'} @@ -2052,9 +2106,21 @@ packages: mdast-util-gfm@3.1.0: resolution: {integrity: sha512-0ulfdQOM3ysHhCJ1p06l0b0VKlhU0wuQs3thxZQagjcjPrlFRqY215uZGHHJan9GEAXd9MbfPjFJz+qMkVR6zQ==} + mdast-util-mdx-expression@2.0.1: + resolution: {integrity: sha512-J6f+9hUp+ldTZqKRSg7Vw5V6MqjATc+3E4gf3CFNcuZNWD8XdyI6zQ8GqH7f8169MM6P7hMBRDVGnn7oHB9kXQ==} + + mdast-util-mdx-jsx@3.2.0: + resolution: {integrity: sha512-lj/z8v0r6ZtsN/cGNNtemmmfoLAFZnjMbNyLzBafjzikOM+glrjNHPlf6lQDOTccj9n5b0PPihEBbhneMyGs1Q==} + + mdast-util-mdxjs-esm@2.0.1: + resolution: {integrity: sha512-EcmOpxsZ96CvlP03NghtH1EsLtr0n9Tm4lPUJUBccV9RwUOneqSycg19n5HGzCf+10LozMRSObtVr3ee1WoHtg==} + mdast-util-phrasing@4.1.0: resolution: {integrity: 
sha512-TqICwyvJJpBwvGAMZjj4J2n0X8QWp21b9l0o7eXyVJ25YNWYbJDVIyD1bZXE6WtV6RmKJVYmQAKWa0zWOABz2w==} + mdast-util-to-hast@13.2.1: + resolution: {integrity: sha512-cctsq2wp5vTsLIcaymblUriiTcZd0CwWtCbLvrOzYCDZoWyMNV8sZ7krj09FSnsiJi3WVsHLM4k6Dq/yaPyCXA==} + mdast-util-to-markdown@2.1.2: resolution: {integrity: sha512-xj68wMTvGXVOKonmog6LwyJKrYXZPvlwabaryTjLh9LuvovB/KAH+kvi8Gjj+7rJjsFi23nkUxRQv1KqSroMqA==} @@ -2201,6 +2267,9 @@ packages: orderedmap@2.1.1: resolution: {integrity: sha512-TvAWxi0nDe1j/rtMcWcIj94+Ffe6n7zhow33h40SKxmsmozs6dz/e+EajymfoFcHd7sxNn8yHM8839uixMOV6g==} + parse-entities@4.0.2: + resolution: {integrity: sha512-GG2AQYWoLgL877gQIKeRPGO1xF9+eG1ujIb5soS5gPvLQ1y2o8FL90w2QWNdf9I361Mpp7726c+lj3U0qK1uGw==} + picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -2223,6 +2292,9 @@ packages: resolution: {integrity: sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==} engines: {node: ^10 || ^12 || >=14} + property-information@7.1.0: + resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} + prosemirror-changeset@2.3.1: resolution: {integrity: sha512-j0kORIBm8ayJNl3zQvD1TTPHJX3g042et6y/KQhZhnPrruO8exkTgG8X+NRpj7kIyMMEx74Xb3DyMIBtO0IKkQ==} @@ -2296,6 +2368,12 @@ packages: peerDependencies: react: ^19.2.0 + react-markdown@10.1.0: + resolution: {integrity: sha512-qKxVopLT/TyA6BX3Ue5NwabOsAzm0Q7kAPwq6L+wWDwisYs7R8vZ0nRXqq6rkueboxpkjvLGU9fWifiX/ZZFxQ==} + peerDependencies: + '@types/react': '>=18' + react: '>=18' + react-refresh@0.17.0: resolution: {integrity: sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==} engines: {node: '>=0.10.0'} @@ -2346,6 +2424,9 @@ packages: remark-parse@11.0.0: resolution: {integrity: sha512-FCxlKLNGknS5ba/1lmpYijMUzX2esxW5xQqjWxw2eHFfS2MSdaHVINFmhjo+qN1WhZhNimq0dZATN9pH0IDrpA==} + 
remark-rehype@11.1.2: + resolution: {integrity: sha512-Dh7l57ianaEoIpzbp0PC9UKAdCSVklD8E5Rpw7ETfbTl3FqcOOgq5q2LVDhgGCkaBv7p24JXikPdvhhmHvKMsw==} + remark-stringify@11.0.0: resolution: {integrity: sha512-1OSmLd3awB/t8qdoEOMazZkNsfVTeY4fTsgzcQFdXNq8ToTN4ZGwrMnlda4K6smTFKD+GRV6O48i6Z4iKgPPpw==} @@ -2386,6 +2467,18 @@ packages: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} + space-separated-tokens@2.0.2: + resolution: {integrity: sha512-PEGlAwrG8yXGXRjW32fGbg66JAlOAwbObuqVoJpv/mRgoWDQfgH1wDPvtzWyUSNAXBGSk8h755YDbbcEy3SH2Q==} + + stringify-entities@4.0.4: + resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + + style-to-js@1.1.21: + resolution: {integrity: sha512-RjQetxJrrUJLQPHbLku6U/ocGtzyjbJMP9lCNK7Ag0CNh690nSH8woqWH9u16nMjYBAok+i7JO1NP2pOy8IsPQ==} + + style-to-object@1.0.14: + resolution: {integrity: sha512-LIN7rULI0jBscWQYaSswptyderlarFkjQ+t79nzty8tcIAceVomEVlLzH5VP4Cmsv6MtKhs7qaAiwlcp+Mgaxw==} + styled-jsx@5.1.6: resolution: {integrity: sha512-qSVyDTeMotdvQYoHWLNGwRFJHC+i+ZvdBRYosOFgC+Wg1vx4frN2/RG/NA7SYqqvKNLf39P2LSRA2pu6n0XYZA==} engines: {node: '>= 12.0.0'} @@ -2425,6 +2518,9 @@ packages: peerDependencies: '@tiptap/core': ^3.0.1 + trim-lines@3.0.1: + resolution: {integrity: sha512-kRj8B+YHZCc9kQYdWfJB2/oUl9rA99qbowYYBtr4ui4mZyAQ2JpvVBd/6U2YloATfqBhBTSMhTpgBHtU0Mf3Rg==} + trough@2.2.0: resolution: {integrity: sha512-tmMpK00BjZiUyVyvrBK7knerNgmgvcV/KLVyuma/SC+TQN167GrMRciANTz09+k3zW8L8t60jWO1GpfkZdjTaw==} @@ -2451,6 +2547,9 @@ packages: unist-util-is@6.0.0: resolution: {integrity: sha512-2qCTHimwdxLfz+YzdGfkqNlH0tLi9xjTnHddPmJwtIG9MGsdbutfTc4P+haPD7l7Cjxf/WZj+we5qfVPvvxfYw==} + unist-util-position@5.0.0: + resolution: {integrity: sha512-fucsC7HjXvkB5R3kTCO7kUjRdrS0BJt3M/FPxmHMBOm8JQi2BsHAHFsy27E0EolP8rp0NzXsJ+jNPyDWvOJZPA==} + unist-util-stringify-position@4.0.0: resolution: {integrity: 
sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==} @@ -3874,8 +3973,16 @@ snapshots: dependencies: '@types/ms': 2.1.0 + '@types/estree-jsx@1.0.5': + dependencies: + '@types/estree': 1.0.8 + '@types/estree@1.0.8': {} + '@types/hast@3.0.4': + dependencies: + '@types/unist': 3.0.3 + '@types/linkify-it@3.0.5': {} '@types/linkify-it@5.0.0': {} @@ -3918,10 +4025,14 @@ snapshots: dependencies: csstype: 3.1.3 + '@types/unist@2.0.11': {} + '@types/unist@3.0.3': {} '@types/use-sync-external-store@0.0.6': {} + '@ungap/structured-clone@1.3.0': {} + '@vercel/oidc@3.0.3': {} '@vitejs/plugin-react@5.0.4(vite@7.1.10(@types/node@24.8.1)(jiti@2.6.1)(lightningcss@1.30.1))': @@ -3978,8 +4089,14 @@ snapshots: ccount@2.0.1: {} + character-entities-html4@2.1.0: {} + + character-entities-legacy@3.0.0: {} + character-entities@2.0.2: {} + character-reference-invalid@2.0.1: {} + chownr@3.0.0: {} class-variance-authority@0.7.1: @@ -4002,6 +4119,8 @@ snapshots: - '@types/react' - '@types/react-dom' + comma-separated-tokens@2.0.3: {} + convert-source-map@2.0.0: {} crelt@1.0.6: {} @@ -4076,6 +4195,8 @@ snapshots: escape-string-regexp@5.0.0: {} + estree-util-is-identifier-name@3.0.0: {} + eventsource-parser@3.0.6: {} extend@3.0.2: {} @@ -4097,6 +4218,45 @@ snapshots: graceful-fs@4.2.11: {} + hast-util-to-jsx-runtime@2.3.6: + dependencies: + '@types/estree': 1.0.8 + '@types/hast': 3.0.4 + '@types/unist': 3.0.3 + comma-separated-tokens: 2.0.3 + devlop: 1.1.0 + estree-util-is-identifier-name: 3.0.0 + hast-util-whitespace: 3.0.0 + mdast-util-mdx-expression: 2.0.1 + mdast-util-mdx-jsx: 3.2.0 + mdast-util-mdxjs-esm: 2.0.1 + property-information: 7.1.0 + space-separated-tokens: 2.0.2 + style-to-js: 1.1.21 + unist-util-position: 5.0.0 + vfile-message: 4.0.3 + transitivePeerDependencies: + - supports-color + + hast-util-whitespace@3.0.0: + dependencies: + '@types/hast': 3.0.4 + + html-url-attributes@3.0.1: {} + + inline-style-parser@0.2.7: {} + + 
is-alphabetical@2.0.1: {} + + is-alphanumerical@2.0.1: + dependencies: + is-alphabetical: 2.0.1 + is-decimal: 2.0.1 + + is-decimal@2.0.1: {} + + is-hexadecimal@2.0.1: {} + is-plain-obj@4.1.0: {} jiti@2.6.1: {} @@ -4266,11 +4426,62 @@ snapshots: transitivePeerDependencies: - supports-color + mdast-util-mdx-expression@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + + mdast-util-mdx-jsx@3.2.0: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@types/unist': 3.0.3 + ccount: 2.0.1 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + parse-entities: 4.0.2 + stringify-entities: 4.0.4 + unist-util-stringify-position: 4.0.0 + vfile-message: 4.0.3 + transitivePeerDependencies: + - supports-color + + mdast-util-mdxjs-esm@2.0.1: + dependencies: + '@types/estree-jsx': 1.0.5 + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + devlop: 1.1.0 + mdast-util-from-markdown: 2.0.2 + mdast-util-to-markdown: 2.1.2 + transitivePeerDependencies: + - supports-color + mdast-util-phrasing@4.1.0: dependencies: '@types/mdast': 4.0.4 unist-util-is: 6.0.0 + mdast-util-to-hast@13.2.1: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@ungap/structured-clone': 1.3.0 + devlop: 1.1.0 + micromark-util-sanitize-uri: 2.0.1 + trim-lines: 3.0.1 + unist-util-position: 5.0.0 + unist-util-visit: 5.0.0 + vfile: 6.0.3 + mdast-util-to-markdown@2.1.2: dependencies: '@types/mdast': 4.0.4 @@ -4525,6 +4736,16 @@ snapshots: orderedmap@2.1.1: {} + parse-entities@4.0.2: + dependencies: + '@types/unist': 2.0.11 + character-entities-legacy: 3.0.0 + character-reference-invalid: 2.0.1 + decode-named-character-reference: 1.2.0 + is-alphanumerical: 2.0.1 + is-decimal: 2.0.1 + is-hexadecimal: 2.0.1 + picocolors@1.1.1: {} picomatch@4.0.3: {} @@ -4548,6 
+4769,8 @@ snapshots: picocolors: 1.1.1 source-map-js: 1.2.1 + property-information@7.1.0: {} + prosemirror-changeset@2.3.1: dependencies: prosemirror-transform: 1.10.4 @@ -4665,6 +4888,24 @@ snapshots: react: 19.2.0 scheduler: 0.27.0 + react-markdown@10.1.0(@types/react@19.2.2)(react@19.2.0): + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + '@types/react': 19.2.2 + devlop: 1.1.0 + hast-util-to-jsx-runtime: 2.3.6 + html-url-attributes: 3.0.1 + mdast-util-to-hast: 13.2.1 + react: 19.2.0 + remark-parse: 11.0.0 + remark-rehype: 11.1.2 + unified: 11.0.5 + unist-util-visit: 5.0.0 + vfile: 6.0.3 + transitivePeerDependencies: + - supports-color + react-refresh@0.17.0: {} react-remove-scroll-bar@2.3.8(@types/react@19.2.2)(react@19.2.0): @@ -4721,6 +4962,14 @@ snapshots: transitivePeerDependencies: - supports-color + remark-rehype@11.1.2: + dependencies: + '@types/hast': 3.0.4 + '@types/mdast': 4.0.4 + mdast-util-to-hast: 13.2.1 + unified: 11.0.5 + vfile: 6.0.3 + remark-stringify@11.0.0: dependencies: '@types/mdast': 4.0.4 @@ -4810,6 +5059,21 @@ snapshots: source-map-js@1.2.1: {} + space-separated-tokens@2.0.2: {} + + stringify-entities@4.0.4: + dependencies: + character-entities-html4: 2.1.0 + character-entities-legacy: 3.0.0 + + style-to-js@1.1.21: + dependencies: + style-to-object: 1.0.14 + + style-to-object@1.0.14: + dependencies: + inline-style-parser: 0.2.7 + styled-jsx@5.1.6(@babel/core@7.28.4)(react@19.2.0): dependencies: client-only: 0.0.1 @@ -4848,6 +5112,8 @@ snapshots: markdown-it-task-lists: 2.1.1 prosemirror-markdown: 1.13.2 + trim-lines@3.0.1: {} + trough@2.2.0: {} tslib@2.8.1: {} @@ -4874,6 +5140,10 @@ snapshots: dependencies: '@types/unist': 3.0.3 + unist-util-position@5.0.0: + dependencies: + '@types/unist': 3.0.3 + unist-util-stringify-position@4.0.0: dependencies: '@types/unist': 3.0.3 diff --git a/stream/src-tauri/src/ipc/git.rs b/stream/src-tauri/src/ipc/git.rs index ae464b3..93a241e 100644 --- a/stream/src-tauri/src/ipc/git.rs +++ 
b/stream/src-tauri/src/ipc/git.rs @@ -1,9 +1,16 @@ -use std::collections::HashSet; +use std::collections::{HashMap, HashSet}; use chrono::{DateTime, Utc}; -use git2::{self, Repository, Time}; +use git2::{self, DiffOptions, Repository, Time}; +use rayon::prelude::*; use serde::{Deserialize, Serialize}; +/// Maximum number of commits to return per repository to prevent memory issues +const MAX_COMMITS_PER_REPO: usize = 200; + +/// Maximum number of files changed to return per commit +const MAX_FILES_PER_COMMIT: usize = 50; + #[derive(Debug, Serialize, Deserialize)] pub struct GitCommit { pub id: String, @@ -61,26 +68,27 @@ pub(crate) async fn get_git_commits_for_repos( start_timestamp: u64, end_timestamp: u64, ) -> Result, String> { - let mut results = Vec::new(); - let start_seconds = (start_timestamp / 1000) as i64; let end_seconds = (end_timestamp / 1000) as i64; - for repo_path in repo_paths { - let repo_commits = match get_repo_commits(&repo_path, start_seconds, end_seconds) { - Ok(commits) => RepoCommits { - repo_path: repo_path.clone(), - commits, - error: None, - }, - Err(e) => RepoCommits { - repo_path: repo_path.clone(), - commits: Vec::new(), - error: Some(format!("Error reading repository: {}", e)), - }, - }; - results.push(repo_commits); - } + // Process all repos in parallel using rayon + let results: Vec = repo_paths + .par_iter() + .map(|repo_path| { + match get_repo_commits(repo_path, start_seconds, end_seconds) { + Ok(commits) => RepoCommits { + repo_path: repo_path.clone(), + commits, + error: None, + }, + Err(e) => RepoCommits { + repo_path: repo_path.clone(), + commits: Vec::new(), + error: Some(format!("Error reading repository: {}", e)), + }, + } + }) + .collect(); Ok(results) } @@ -95,91 +103,86 @@ fn time_to_iso_date(time: Time) -> String { dt.format("%Y-%m-%d").to_string() } -fn get_branches_for_commit( +/// Build a map of commit OID -> (branches, is_on_remote) for all branch tips +/// This is much more efficient than walking history for 
each commit +fn build_branch_tip_map( repo: &Repository, - commit_oid: git2::Oid, -) -> Result<(Vec, bool), Box> { - let mut all_branches = HashSet::new(); - let mut main_branches = HashSet::new(); - let mut feature_branches = HashSet::new(); - let mut found_on_remote = false; +) -> Result, bool)>, Box> { + let mut tip_map: HashMap, bool)> = HashMap::new(); + // Process local branches - just get the tip commits let local_branches = repo.branches(Some(git2::BranchType::Local))?; for branch in local_branches { let (branch, _) = branch?; if let Some(name) = branch.name()? { let reference = branch.get(); if let Some(target) = reference.target() { - let mut revwalk = repo.revwalk()?; - revwalk.push(target)?; - - for oid in revwalk { - let oid = oid?; - if oid == commit_oid { - all_branches.insert(name.to_string()); - if is_main_branch(name) { - main_branches.insert(normalize_branch_name(name)); - } else { - feature_branches.insert(name.to_string()); - } - break; - } - } + let entry = tip_map.entry(target).or_insert_with(|| (Vec::new(), false)); + entry.0.push(name.to_string()); } } } + // Process remote branches - just get the tip commits let remote_branches = repo.branches(Some(git2::BranchType::Remote))?; for branch in remote_branches { let (branch, _) = branch?; if let Some(name) = branch.name()? 
{ let reference = branch.get(); if let Some(target) = reference.target() { - let mut revwalk = repo.revwalk()?; - revwalk.push(target)?; - - for oid in revwalk { - let oid = oid?; - if oid == commit_oid { - found_on_remote = true; - - let normalized = normalize_branch_name(name); - if !all_branches.contains(&normalized) { - all_branches.insert(name.to_string()); - if is_main_branch(name) { - main_branches.insert(normalized); - } else if feature_branches.len() < 3 { - feature_branches.insert(name.to_string()); - } - } - break; - } + let entry = tip_map.entry(target).or_insert_with(|| (Vec::new(), false)); + entry.1 = true; // Mark as on remote + let normalized = normalize_branch_name(name); + if !entry.0.contains(&normalized) { + entry.0.push(normalized); } } } } - let mut result = Vec::new(); - - if !main_branches.is_empty() { - if main_branches.contains("main") { - result.push("main".to_string()); - } else if main_branches.contains("master") { - result.push("master".to_string()); - } else if main_branches.contains("develop") { - result.push("develop".to_string()); - } else if let Some(branch) = main_branches.iter().next() { - result.push(branch.clone()); - } - } else { - result.extend(feature_branches.into_iter().take(2)); + Ok(tip_map) +} + +/// Get the primary branch for a commit using a simplified approach +/// Instead of walking all branch histories, we check if commit is reachable from main branches +fn get_branch_for_commit_fast( + repo: &Repository, + commit_oid: git2::Oid, + branch_tip_map: &HashMap, bool)>, +) -> (Vec, bool) { + // First check if this commit is a branch tip (fast path) + if let Some((branches, is_remote)) = branch_tip_map.get(&commit_oid) { + let mut result = branches.clone(); + // Prioritize main branches + result.sort_by(|a, b| { + let a_main = is_main_branch(a); + let b_main = is_main_branch(b); + b_main.cmp(&a_main) + }); + result.truncate(2); + return (result, *is_remote); } - if result.is_empty() { - 
result.push("unknown".to_string()); + // For non-tip commits, check if reachable from main/master only (for performance) + // This is a simplified check - we don't try to find ALL branches + let main_branch_names = ["main", "master", "origin/main", "origin/master"]; + + for branch_name in &main_branch_names { + if let Ok(reference) = repo.find_reference(&format!("refs/heads/{}", branch_name)) + .or_else(|_| repo.find_reference(&format!("refs/remotes/{}", branch_name))) + { + if let Some(target) = reference.target() { + // Check if commit is an ancestor of the branch tip (limited depth) + if let Ok(true) = repo.graph_descendant_of(target, commit_oid) { + let is_remote = branch_name.starts_with("origin/"); + return (vec![normalize_branch_name(branch_name)], is_remote); + } + } + } } - Ok((result, found_on_remote)) + // Default: assume it's on some branch and likely pushed + (vec!["main".to_string()], true) } fn normalize_branch_name(branch_name: &str) -> String { @@ -299,6 +302,51 @@ fn build_commit_url(remote_url: &str, commit_id: &str) -> Option { } } +/// Get files changed for a commit using optimized diff options (no content, just file names) +fn get_files_changed_fast( + repo: &Repository, + commit: &git2::Commit, +) -> Vec { + let mut files_changed = Vec::new(); + + let parent = match commit.parent(0) { + Ok(p) => p, + Err(_) => return files_changed, // Initial commit or error + }; + + let tree = match commit.tree() { + Ok(t) => t, + Err(_) => return files_changed, + }; + + let parent_tree = match parent.tree() { + Ok(t) => t, + Err(_) => return files_changed, + }; + + // Configure diff to skip content computation entirely + let mut diff_opts = DiffOptions::new(); + diff_opts.skip_binary_check(true); // Don't check if files are binary + diff_opts.ignore_submodules(true); // Skip submodule processing + diff_opts.context_lines(0); // No context lines needed + + let diff = match repo.diff_tree_to_tree(Some(&parent_tree), Some(&tree), Some(&mut diff_opts)) { + 
Ok(d) => d, + Err(_) => return files_changed, + }; + + // Use deltas() iterator - much faster than foreach, no callbacks + for delta in diff.deltas().take(MAX_FILES_PER_COMMIT) { + if let Some(path) = delta.new_file().path() { + if let Some(path_str) = path.to_str() { + files_changed.push(path_str.to_string()); + } + } + } + + files_changed +} + fn get_repo_commits( repo_path: &str, start_seconds: i64, @@ -312,72 +360,78 @@ fn get_repo_commits( revwalk.set_sorting(git2::Sort::TIME)?; let remote_url = get_remote_url(&repo); + + // Build branch tip map once upfront (much faster than per-commit checks) + let branch_tip_map = build_branch_tip_map(&repo).unwrap_or_default(); let mut commits = Vec::new(); let mut seen_commits = HashSet::new(); for oid in revwalk { - let oid = oid?; - let commit = repo.find_commit(oid)?; - let commit_time = commit.time(); - let commit_timestamp = commit_time.seconds(); + // Stop early if we've reached the limit + if commits.len() >= MAX_COMMITS_PER_REPO { + break; + } + + let oid = match oid { + Ok(oid) => oid, + Err(_) => continue, + }; if seen_commits.contains(&oid) { continue; } seen_commits.insert(oid); - if commit_timestamp >= start_seconds && commit_timestamp <= end_seconds { - let author = commit.author(); - let message = commit.message().unwrap_or("").to_string(); - - let mut files_changed = Vec::new(); - if let Some(parent) = commit.parent(0).ok() { - let tree = commit.tree()?; - let parent_tree = parent.tree()?; - let diff = repo.diff_tree_to_tree(Some(&parent_tree), Some(&tree), None)?; - - diff.foreach( - &mut |delta, _| { - if let Some(file) = delta.new_file().path() { - if let Some(path_str) = file.to_str() { - files_changed.push(path_str.to_string()); - } - } - true - }, - None, - None, - None, - )?; - } + let commit = match repo.find_commit(oid) { + Ok(c) => c, + Err(_) => continue, + }; + + let commit_time = commit.time(); + let commit_timestamp = commit_time.seconds(); - let (branches, is_on_remote) = 
get_branches_for_commit(&repo, oid)?; - - let commit_id = format!("{}", oid); - let url = if is_on_remote { - remote_url - .as_ref() - .and_then(|remote| build_commit_url(remote, &commit_id)) - } else { - None - }; - - let git_commit = GitCommit { - id: commit_id, - message: message.lines().next().unwrap_or("").to_string(), - author_name: author.name().unwrap_or("Unknown").to_string(), - author_email: author.email().unwrap_or("").to_string(), - timestamp: time_to_timestamp_ms(commit_time), - date: time_to_iso_date(commit_time), - repo_path: repo_path.to_string(), - files_changed, - branches, - url, - }; - - commits.push(git_commit); + // Skip commits outside the date range + // Since we're sorted by time, we can break early if we're past the range + if commit_timestamp < start_seconds { + break; + } + if commit_timestamp > end_seconds { + continue; } + + let author = commit.author(); + let message = commit.message().unwrap_or("").to_string(); + + // Get files changed using optimized method (no diff content) + let files_changed = get_files_changed_fast(&repo, &commit); + + // Use the fast branch detection + let (branches, is_on_remote) = get_branch_for_commit_fast(&repo, oid, &branch_tip_map); + + let commit_id = format!("{}", oid); + let url = if is_on_remote { + remote_url + .as_ref() + .and_then(|remote| build_commit_url(remote, &commit_id)) + } else { + None + }; + + let git_commit = GitCommit { + id: commit_id, + message: message.lines().next().unwrap_or("").to_string(), + author_name: author.name().unwrap_or("Unknown").to_string(), + author_email: author.email().unwrap_or("").to_string(), + timestamp: time_to_timestamp_ms(commit_time), + date: time_to_iso_date(commit_time), + repo_path: repo_path.to_string(), + files_changed, + branches, + url, + }; + + commits.push(git_commit); } commits.sort_by(|a, b| b.timestamp.cmp(&a.timestamp)); diff --git a/stream/src-tauri/tauri.conf.json b/stream/src-tauri/tauri.conf.json index b65d6aa..f9f1750 100644 --- 
a/stream/src-tauri/tauri.conf.json +++ b/stream/src-tauri/tauri.conf.json @@ -1,7 +1,7 @@ { "$schema": "https://schema.tauri.app/config/2", "productName": "stream", - "version": "0.2.6", + "version": "0.2.7", "identifier": "com.marcelmarais.stream", "build": { "beforeDevCommand": "pnpm dev", diff --git a/stream/src/app/browse/timeline/page.tsx b/stream/src/app/browse/timeline/page.tsx index b75b9cf..2c1be6a 100644 --- a/stream/src/app/browse/timeline/page.tsx +++ b/stream/src/app/browse/timeline/page.tsx @@ -2,8 +2,16 @@ import { CalendarPlusIcon, FileTextIcon } from "@phosphor-icons/react"; import { useQueryClient } from "@tanstack/react-query"; +import { throttle } from "lodash-es"; import { useRouter, useSearchParams } from "next/navigation"; -import { Suspense, useCallback, useEffect, useRef, useState } from "react"; +import { + Suspense, + useCallback, + useEffect, + useMemo, + useRef, + useState, +} from "react"; import { Virtuoso, type VirtuosoHandle } from "react-virtuoso"; import { Footer } from "@/components/footer"; import { FileCard, FocusedFileOverlay } from "@/components/markdown-file-card"; @@ -153,30 +161,37 @@ function TimelineView({ folderPath }: { folderPath: string }) { [folderPath, queryClient], ); + // Throttle prefetching to avoid hammering during fast scroll + const throttledPrefetch = useMemo( + () => + throttle( + async (visibleFiles: MarkdownFileMetadata[]) => { + const filePaths = visibleFiles.map((file) => file.filePath); + await prefetchFileContents(filePaths); + + if (visibleFiles.length > 0) { + const dateKeys = visibleFiles.map((file) => { + const dateFromFilename = getDateFromFilename(file.fileName); + return dateFromFilename || getDateKey(file.createdAt); + }); + await prefetchCommitsForDates(folderPath, dateKeys); + } + }, + 300, // Only prefetch at most once every 300ms + { leading: true, trailing: true }, + ), + [prefetchFileContents, prefetchCommitsForDates, folderPath], + ); + const handleRangeChanged = useCallback( - async 
(range: { startIndex: number; endIndex: number }) => { + (range: { startIndex: number; endIndex: number }) => { const visibleFiles = allFilesMetadata.slice( range.startIndex, range.endIndex + 1, ); - - const filePaths = visibleFiles.map((file) => file.filePath); - await prefetchFileContents(filePaths); - - if (visibleFiles.length > 0) { - const dateKeys = visibleFiles.map((file) => { - const dateFromFilename = getDateFromFilename(file.fileName); - return dateFromFilename || getDateKey(file.createdAt); - }); - await prefetchCommitsForDates(folderPath, dateKeys); - } + throttledPrefetch(visibleFiles); }, - [ - allFilesMetadata, - prefetchFileContents, - prefetchCommitsForDates, - folderPath, - ], + [allFilesMetadata, throttledPrefetch], ); const renderItem = useCallback( @@ -245,7 +260,7 @@ function TimelineView({ folderPath }: { folderPath: string }) { totalCount={allFilesMetadata.length} itemContent={renderItem} rangeChanged={handleRangeChanged} - overscan={25} + overscan={5} className="h-full" /> diff --git a/stream/src/components/markdown-file-card.tsx b/stream/src/components/markdown-file-card.tsx index addfe7c..1a84a03 100644 --- a/stream/src/components/markdown-file-card.tsx +++ b/stream/src/components/markdown-file-card.tsx @@ -189,9 +189,6 @@ export function FileCard({ const { data: commitsByDate = {} } = useCommitsForDate( folderPath, file.dateFromFilename, - { - autoRefresh: true, - }, ); const commits = filterCommitsForDate(commitsByDate, file.dateFromFilename); @@ -339,7 +336,7 @@ export function FocusedFileOverlay({ const { data: commitsByDate = {} } = useCommitsForDate( folderPath || "", file.dateFromFilename, - { autoRefresh: true }, + { autoRefresh: true }, // Only auto-refresh when expanded/focused ); const commits = filterCommitsForDate(commitsByDate, file.dateFromFilename); diff --git a/stream/src/hooks/use-git-queries.ts b/stream/src/hooks/use-git-queries.ts index 3d07e76..abbcb9f 100644 --- a/stream/src/hooks/use-git-queries.ts +++ 
b/stream/src/hooks/use-git-queries.ts @@ -190,7 +190,8 @@ export function useFetchRepos(folderPath: string) { /** * Hook to get commits for a specific date - * Set autoRefresh to true to refetch every 5 seconds + * Uses longer staleTime to avoid refetching during scroll + * Set autoRefresh for focused/expanded cards only */ export function useCommitsForDate( folderPath: string, @@ -207,9 +208,9 @@ export function useCommitsForDate( return {} as CommitsByDate; } - const startOfDay = date; + const startOfDay = new Date(date); startOfDay.setHours(0, 0, 0, 0); - const endOfDay = date; + const endOfDay = new Date(date); endOfDay.setHours(23, 59, 59, 999); const range = createDateRange.custom(startOfDay, endOfDay); @@ -217,14 +218,15 @@ export function useCommitsForDate( return groupCommitsByDate(repoCommits); }, enabled: enabled && repos.length > 0, - refetchInterval: autoRefresh ? 5000 : false, - staleTime: 5000, + refetchInterval: autoRefresh ? 10000 : false, // Only refresh focused cards, every 10s + staleTime: 60000, // Consider fresh for 1 minute to avoid refetch storms during scroll + gcTime: 300000, // Keep in cache for 5 minutes }); } /** * Hook to get commits for multiple dates (based on visible files) - * Automatically refetches every 5 seconds + * Uses staleTime to prevent refetch storms during rapid scroll */ export function useCommitsForVisibleFiles( folderPath: string, @@ -262,8 +264,8 @@ export function useCommitsForVisibleFiles( return groupCommitsByDate(repoCommits); }, enabled: dateKeys.length > 0 && repos.length > 0, - refetchInterval: 5000, // Auto-refresh every 5 seconds - staleTime: 0, // Always consider stale so refetchInterval works + staleTime: 60000, // Consider fresh for 1 minute + gcTime: 300000, // Keep in cache for 5 minutes })), }); diff --git a/stream/src/hooks/use-markdown-queries.ts b/stream/src/hooks/use-markdown-queries.ts index c1d06f1..a0f2981 100644 --- a/stream/src/hooks/use-markdown-queries.ts +++ 
b/stream/src/hooks/use-markdown-queries.ts @@ -42,6 +42,7 @@ export function useMarkdownMetadata(folderPath: string) { return metadata; }, enabled: !!folderPath, + staleTime: 30000, // Consider fresh for 30 seconds }); } @@ -57,6 +58,8 @@ export function useMarkdownFileContent(filePath: string | null) { return contentMap.get(filePath) ?? ""; }, enabled: !!filePath, + staleTime: 60000, // Consider fresh for 1 minute + gcTime: 300000, // Keep in cache for 5 minutes }); } diff --git a/stream/src/ipc/habit-reader.ts b/stream/src/ipc/habit-reader.ts index 854cc77..0b75591 100644 --- a/stream/src/ipc/habit-reader.ts +++ b/stream/src/ipc/habit-reader.ts @@ -1,4 +1,10 @@ -import { Store } from "@tauri-apps/plugin-store"; +import { exists, readTextFile, writeTextFile } from "@tauri-apps/plugin-fs"; +import { load } from "@tauri-apps/plugin-store"; + +// Constants for getting the selected folder from settings +const FOLDER_STORAGE_KEY = "stream-last-selected-folder"; +const FOLDER_STORE_FILE = "settings.json"; +const HABITS_FILENAME = "habits.json"; /** * Habit tracking period options @@ -81,20 +87,63 @@ export interface Habit { completions: Record<string, number>; } -// Store instance for habits -let store: Store | null = null; +/** + * Get the selected folder from settings store + */ +async function getSelectedFolder(): Promise<string | null> { + try { + const store = await load(FOLDER_STORE_FILE, { + autoSave: true, + defaults: {}, + }); + const savedFolder = await store.get<string>(FOLDER_STORAGE_KEY); + return savedFolder || null; + } catch (error) { + console.warn("Failed to get selected folder:", error); + return null; + } +} /** - * Initialize the habits store + * Get the full path to the habits file in the markdown directory */ -async function getStore(): Promise<Store> { - if (!store) { - store = await Store.load("habits.json"); +async function getHabitsFilePath(): Promise<string> { + const folder = await getSelectedFolder(); + if (!folder) { + throw new Error("No folder selected. 
Please select a folder first."); } - return store; + return folder.endsWith("/") ? `${folder}${HABITS_FILENAME}` : `${folder}/${HABITS_FILENAME}`; } -const HABITS_KEY = "habits"; +/** + * Read habits from the JSON file in the markdown directory + */ +async function readHabitsFromFile(): Promise<Habit[]> { + try { + const filePath = await getHabitsFilePath(); + const fileExists = await exists(filePath); + if (!fileExists) { + return []; + } + const content = await readTextFile(filePath); + const data = JSON.parse(content); + return data.habits || []; + } catch (error) { + console.error("Error reading habits file:", error); + return []; + } +} + +/** + * Write habits to the JSON file in the markdown directory + */ +async function writeHabitsToFile(habits: Habit[]): Promise<void> { + const filePath = await getHabitsFilePath(); + const content = JSON.stringify({ habits }, null, 2); + await writeTextFile(filePath, content); +} /** * Generate a unique ID for a new habit @@ -218,13 +267,11 @@ export function getCompletionsForPeriod( } /** - * Get all habits from the store + * Get all habits from the file */ export async function getAllHabits(): Promise<Habit[]> { try { - const s = await getStore(); - const habits = await s.get<Habit[]>(HABITS_KEY); - return habits || []; + return await readHabitsFromFile(); } catch (error) { console.error("Error getting habits:", error); return []; } @@ -241,8 +288,7 @@ export async function createHabit( icon?: HabitIcon, ): Promise<Habit> { try { - const s = await getStore(); - const habits = (await s.get<Habit[]>(HABITS_KEY)) || []; + const habits = await readHabitsFromFile(); const newHabit: Habit = { id: generateId(), @@ -255,8 +301,7 @@ }; habits.push(newHabit); - await s.set(HABITS_KEY, habits); - await s.save(); + await writeHabitsToFile(habits); return newHabit; } catch (error) { @@ -270,8 +315,7 @@ */ export async function deleteHabit(id: string): Promise<void> { try { - const s = await getStore(); - const habits
= (await s.get<Habit[]>(HABITS_KEY)) || []; + const habits = await readHabitsFromFile(); const filteredHabits = habits.filter((h) => h.id !== id); @@ -279,8 +323,7 @@ throw new Error("Habit not found"); } - await s.set(HABITS_KEY, filteredHabits); - await s.save(); + await writeHabitsToFile(filteredHabits); } catch (error) { console.error("Error deleting habit:", error); throw new Error("Failed to delete habit"); @@ -300,8 +343,7 @@ }, ): Promise<Habit> { try { - const s = await getStore(); - const habits = (await s.get<Habit[]>(HABITS_KEY)) || []; + const habits = await readHabitsFromFile(); const habitIndex = habits.findIndex((h) => h.id === id); if (habitIndex === -1) { @@ -324,8 +366,7 @@ habit.icon = updates.icon; } - await s.set(HABITS_KEY, habits); - await s.save(); + await writeHabitsToFile(habits); return habit; } catch (error) { @@ -342,8 +383,7 @@ date: Date, ): Promise<Habit> { try { - const s = await getStore(); - const habits = (await s.get<Habit[]>(HABITS_KEY)) || []; + const habits = await readHabitsFromFile(); const habitIndex = habits.findIndex((h) => h.id === habitId); if (habitIndex === -1) { @@ -354,8 +394,7 @@ const habit = habits[habitIndex]; habit.completions[dateKey] = (habit.completions[dateKey] || 0) + 1; - await s.set(HABITS_KEY, habits); - await s.save(); + await writeHabitsToFile(habits); return habit; } catch (error) { @@ -373,8 +412,7 @@ date: Date, ): Promise<Habit> { try { - const s = await getStore(); - const habits = (await s.get<Habit[]>(HABITS_KEY)) || []; + const habits = await readHabitsFromFile(); const habitIndex = habits.findIndex((h) => h.id === habitId); if (habitIndex === -1) { @@ -393,8 +431,7 @@ delete habit.completions[dateKey]; } - await s.set(HABITS_KEY, habits); - await
s.save(); + await writeHabitsToFile(habits); } return habit;